[tor-commits] [arm/master] Renaming files to conform with PEP8

atagar at torproject.org
Mon Jan 27 02:32:35 UTC 2014


commit bbb6b2833a756b7e75847d7f62baac4558d77659
Author: Damian Johnson <atagar at torproject.org>
Date:   Sat Jan 18 21:29:44 2014 -0800

    Renaming files to conform with PEP8
    
    While our contents conformed with PEP8, our file names were still camel case.
---
 arm/__init__.py                     |    2 +-
 arm/configPanel.py                  |  726 -------------------
 arm/config_panel.py                 |  726 +++++++++++++++++++
 arm/connections/__init__.py         |    2 +-
 arm/connections/circEntry.py        |  216 ------
 arm/connections/circ_entry.py       |  216 ++++++
 arm/connections/connEntry.py        |  957 ------------------------
 arm/connections/connPanel.py        |  668 -----------------
 arm/connections/conn_entry.py       |  957 ++++++++++++++++++++++++
 arm/connections/conn_panel.py       |  668 +++++++++++++++++
 arm/connections/countPopup.py       |  113 ---
 arm/connections/count_popup.py      |  113 +++
 arm/connections/descriptorPopup.py  |  272 -------
 arm/connections/descriptor_popup.py |  272 +++++++
 arm/controller.py                   |   52 +-
 arm/graphing/__init__.py            |    2 +-
 arm/graphing/bandwidthStats.py      |  508 -------------
 arm/graphing/bandwidth_stats.py     |  508 +++++++++++++
 arm/graphing/connStats.py           |   72 --
 arm/graphing/conn_stats.py          |   72 ++
 arm/graphing/graphPanel.py          |  596 ---------------
 arm/graphing/graph_panel.py         |  596 +++++++++++++++
 arm/graphing/resourceStats.py       |   63 --
 arm/graphing/resource_stats.py      |   63 ++
 arm/headerPanel.py                  |  679 -----------------
 arm/header_panel.py                 |  679 +++++++++++++++++
 arm/logPanel.py                     | 1371 -----------------------------------
 arm/log_panel.py                    | 1371 +++++++++++++++++++++++++++++++++++
 arm/menu/actions.py                 |   18 +-
 arm/menu/menu.py                    |   10 +-
 arm/popups.py                       |   12 +-
 arm/starter.py                      |   14 +-
 arm/torrcPanel.py                   |  353 ---------
 arm/torrc_panel.py                  |  353 +++++++++
 arm/util/__init__.py                |    2 +-
 arm/util/panel.py                   |   10 +-
 arm/util/textInput.py               |  213 ------
 arm/util/text_input.py              |  213 ++++++
 arm/util/torConfig.py               | 1301 ---------------------------------
 arm/util/torTools.py                | 1039 --------------------------
 arm/util/tor_config.py              | 1301 +++++++++++++++++++++++++++++++++
 arm/util/tor_tools.py               | 1039 ++++++++++++++++++++++++++
 arm/util/uiTools.py                 |  628 ----------------
 arm/util/ui_tools.py                |  628 ++++++++++++++++
 44 files changed, 9837 insertions(+), 9837 deletions(-)
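
For reference, the renames above apply a mechanical camelCase to snake_case mapping (configPanel.py becomes config_panel.py, torTools.py becomes tor_tools.py, and so on), with the imports and module references inside each file updated to match. Below is a minimal sketch of how such a rename could be scripted; the to_snake_case helper and the use of 'git mv' are illustrative assumptions, not the tooling actually used for this commit.

  import re
  import subprocess
  from pathlib import Path


  def to_snake_case(name):
    # "configPanel.py" -> "config_panel.py": insert an underscore before each
    # interior capital letter, then lowercase the result

    return re.sub(r'(?<=[a-z0-9])([A-Z])', r'_\1', name).lower()


  for path in Path('arm').rglob('*.py'):
    new_name = to_snake_case(path.name)

    if new_name != path.name:
      # 'git mv' records the rename so history follows the file to its new name

      subprocess.check_call(['git', 'mv', str(path), str(path.with_name(new_name))])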

diff --git a/arm/__init__.py b/arm/__init__.py
index 8147e38..c203b5f 100644
--- a/arm/__init__.py
+++ b/arm/__init__.py
@@ -2,7 +2,7 @@
 Scripts involved in validating user input, system state, and initializing arm.
 """
 
-__all__ = ["starter", "prereq", "version", "configPanel", "controller", "headerPanel", "logPanel", "popups", "torrcPanel"]
+__all__ = ["starter", "prereq", "version", "config_panel", "controller", "header_panel", "log_panel", "popups", "torrc_panel"]
 
 __version__ = '1.4.6_dev'
 __release_date__ = 'April 28, 2011'
diff --git a/arm/configPanel.py b/arm/configPanel.py
deleted file mode 100644
index d78d4d4..0000000
--- a/arm/configPanel.py
+++ /dev/null
@@ -1,726 +0,0 @@
-"""
-Panel presenting the configuration state for tor or arm. Options can be edited
-and the resulting configuration files saved.
-"""
-
-import curses
-import threading
-
-import arm.controller
-import popups
-
-from arm.util import panel, torConfig, torTools, uiTools
-
-import stem.control
-
-from stem.util import conf, enum, str_tools
-
-# TODO: The arm use cases are incomplete since they currently can't be
-# modified, have their descriptions fetched, or even get a complete listing
-# of what's available.
-
-State = enum.Enum("TOR", "ARM")  # state to be presented
-
-# mappings of option categories to the color for their entries
-
-CATEGORY_COLOR = {
-  torConfig.Category.GENERAL: "green",
-  torConfig.Category.CLIENT: "blue",
-  torConfig.Category.RELAY: "yellow",
-  torConfig.Category.DIRECTORY: "magenta",
-  torConfig.Category.AUTHORITY: "red",
-  torConfig.Category.HIDDEN_SERVICE: "cyan",
-  torConfig.Category.TESTING: "white",
-  torConfig.Category.UNKNOWN: "white",
-}
-
-# attributes of a ConfigEntry
-
-Field = enum.Enum(
-  "CATEGORY",
-  "OPTION",
-  "VALUE",
-  "TYPE",
-  "ARG_USAGE",
-  "SUMMARY",
-  "DESCRIPTION",
-  "MAN_ENTRY",
-  "IS_DEFAULT",
-)
-
-FIELD_ATTR = {
-  Field.CATEGORY: ("Category", "red"),
-  Field.OPTION: ("Option Name", "blue"),
-  Field.VALUE: ("Value", "cyan"),
-  Field.TYPE: ("Arg Type", "green"),
-  Field.ARG_USAGE: ("Arg Usage", "yellow"),
-  Field.SUMMARY: ("Summary", "green"),
-  Field.DESCRIPTION: ("Description", "white"),
-  Field.MAN_ENTRY: ("Man Page Entry", "blue"),
-  Field.IS_DEFAULT: ("Is Default", "magenta"),
-}
-
-
-def conf_handler(key, value):
-  if key == "features.config.selectionDetails.height":
-    return max(0, value)
-  elif key == "features.config.state.colWidth.option":
-    return max(5, value)
-  elif key == "features.config.state.colWidth.value":
-    return max(5, value)
-  elif key == "features.config.order":
-    return conf.parse_enum_csv(key, value[0], Field, 3)
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.config.order": [Field.MAN_ENTRY, Field.OPTION, Field.IS_DEFAULT],
-  "features.config.selectionDetails.height": 6,
-  "features.config.prepopulateEditValues": True,
-  "features.config.state.showPrivateOptions": False,
-  "features.config.state.showVirtualOptions": False,
-  "features.config.state.colWidth.option": 25,
-  "features.config.state.colWidth.value": 15,
-}, conf_handler)
-
-
-def get_field_from_label(field_label):
-  """
-  Converts field labels back to their enumeration, raising a ValueError if it
-  doesn't exist.
-  """
-
-  for entry_enum in FIELD_ATTR:
-    if field_label == FIELD_ATTR[entry_enum][0]:
-      return entry_enum
-
-
-class ConfigEntry():
-  """
-  Configuration option in the panel.
-  """
-
-  def __init__(self, option, type, is_default):
-    self.fields = {}
-    self.fields[Field.OPTION] = option
-    self.fields[Field.TYPE] = type
-    self.fields[Field.IS_DEFAULT] = is_default
-
-    # Fetches extra information from external sources (the arm config and tor
-    # man page). These are None if unavailable for this config option.
-
-    summary = torConfig.get_config_summary(option)
-    man_entry = torConfig.get_config_description(option)
-
-    if man_entry:
-      self.fields[Field.MAN_ENTRY] = man_entry.index
-      self.fields[Field.CATEGORY] = man_entry.category
-      self.fields[Field.ARG_USAGE] = man_entry.arg_usage
-      self.fields[Field.DESCRIPTION] = man_entry.description
-    else:
-      self.fields[Field.MAN_ENTRY] = 99999  # sorts non-man entries last
-      self.fields[Field.CATEGORY] = torConfig.Category.UNKNOWN
-      self.fields[Field.ARG_USAGE] = ""
-      self.fields[Field.DESCRIPTION] = ""
-
-    # uses the full man page description if a summary is unavailable
-
-    self.fields[Field.SUMMARY] = summary if summary is not None else self.fields[Field.DESCRIPTION]
-
-    # cache of what's displayed for this configuration option
-
-    self.label_cache = None
-    self.label_cache_args = None
-
-  def get(self, field):
-    """
-    Provides back the value in the given field.
-
-    Arguments:
-      field - enum for the field to be provided back
-    """
-
-    if field == Field.VALUE:
-      return self._get_value()
-    else:
-      return self.fields[field]
-
-  def get_all(self, fields):
-    """
-    Provides back a list with the given field values.
-
-    Arguments:
-      field - enums for the fields to be provided back
-    """
-
-    return [self.get(field) for field in fields]
-
-  def get_label(self, option_width, value_width, summary_width):
-    """
-    Provides display string of the configuration entry with the given
-    constraints on the width of the contents.
-
-    Arguments:
-      option_width  - width of the option column
-      value_width   - width of the value column
-      summary_width - width of the summary column
-    """
-
-    # Fetching the display entries is very common so this caches the values.
-    # Doing this substantially drops cpu usage when scrolling (by around 40%).
-
-    arg_set = (option_width, value_width, summary_width)
-
-    if not self.label_cache or self.label_cache_args != arg_set:
-      option_label = uiTools.crop_str(self.get(Field.OPTION), option_width)
-      value_label = uiTools.crop_str(self.get(Field.VALUE), value_width)
-      summary_label = uiTools.crop_str(self.get(Field.SUMMARY), summary_width, None)
-      line_text_layout = "%%-%is %%-%is %%-%is" % (option_width, value_width, summary_width)
-      self.label_cache = line_text_layout % (option_label, value_label, summary_label)
-      self.label_cache_args = arg_set
-
-    return self.label_cache
-
-  def is_unset(self):
-    """
-    True if we have no value, false otherwise.
-    """
-
-    conf_value = torTools.get_conn().get_option(self.get(Field.OPTION), [], True)
-
-    return not bool(conf_value)
-
-  def _get_value(self):
-    """
-    Provides the current value of the configuration entry, taking advantage of
-    the torTools caching to effectively query the accurate value. This uses the
-    value's type to provide a user friendly representation if able.
-    """
-
-    conf_value = ", ".join(torTools.get_conn().get_option(self.get(Field.OPTION), [], True))
-
-    # provides nicer values for recognized types
-
-    if not conf_value:
-      conf_value = "<none>"
-    elif self.get(Field.TYPE) == "Boolean" and conf_value in ("0", "1"):
-      conf_value = "False" if conf_value == "0" else "True"
-    elif self.get(Field.TYPE) == "DataSize" and conf_value.isdigit():
-      conf_value = str_tools.get_size_label(int(conf_value))
-    elif self.get(Field.TYPE) == "TimeInterval" and conf_value.isdigit():
-      conf_value = str_tools.get_time_label(int(conf_value), is_long = True)
-
-    return conf_value
-
-
-class ConfigPanel(panel.Panel):
-  """
-  Renders a listing of the tor or arm configuration state, allowing options to
-  be selected and edited.
-  """
-
-  def __init__(self, stdscr, config_type):
-    panel.Panel.__init__(self, stdscr, "configuration", 0)
-
-    self.config_type = config_type
-    self.conf_contents = []
-    self.conf_important_contents = []
-    self.scroller = uiTools.Scroller(True)
-    self.vals_lock = threading.RLock()
-
-    # shows all configuration options if true, otherwise only the ones with
-    # the 'important' flag are shown
-
-    self.show_all = False
-
-    # initializes config contents if we're connected
-
-    conn = torTools.get_conn()
-    conn.add_status_listener(self.reset_listener)
-
-    if conn.is_alive():
-      self.reset_listener(None, stem.control.State.INIT, None)
-
-  def reset_listener(self, controller, event_type, _):
-    # fetches configuration options if a new instance, otherwise keeps our
-    # current contents
-
-    if event_type == stem.control.State.INIT:
-      self._load_config_options()
-
-  def _load_config_options(self):
-    """
-    Fetches the configuration options available from tor or arm.
-    """
-
-    self.conf_contents = []
-    self.conf_important_contents = []
-
-    if self.config_type == State.TOR:
-      conn, config_option_lines = torTools.get_conn(), []
-      custom_options = torConfig.get_custom_options()
-      config_option_query = conn.get_info("config/names", None)
-
-      if config_option_query:
-        config_option_lines = config_option_query.strip().split("\n")
-
-      for line in config_option_lines:
-        # lines are of the form "<option> <type>[ <documentation>]", like:
-        # UseEntryGuards Boolean
-        # documentation is apparently only in older versions (for instance,
-        # 0.2.1.25)
-
-        line_comp = line.strip().split(" ")
-        conf_option, conf_type = line_comp[0], line_comp[1]
-
-        # skips private and virtual entries if not configured to show them
-
-        if not CONFIG["features.config.state.showPrivateOptions"] and conf_option.startswith("__"):
-          continue
-        elif not CONFIG["features.config.state.showVirtualOptions"] and conf_type == "Virtual":
-          continue
-
-        self.conf_contents.append(ConfigEntry(conf_option, conf_type, not conf_option in custom_options))
-
-    elif self.config_type == State.ARM:
-      # loaded via the conf utility
-
-      arm_config = conf.get_config("arm")
-
-      for key in arm_config.keys():
-        pass  # TODO: implement
-
-    # mirror listing with only the important configuration options
-
-    self.conf_important_contents = []
-
-    for entry in self.conf_contents:
-      if torConfig.is_important(entry.get(Field.OPTION)):
-        self.conf_important_contents.append(entry)
-
-    # if there aren't any important options then show everything
-
-    if not self.conf_important_contents:
-      self.conf_important_contents = self.conf_contents
-
-    self.set_sort_order()  # initial sorting of the contents
-
-  def get_selection(self):
-    """
-    Provides the currently selected entry.
-    """
-
-    return self.scroller.get_cursor_selection(self._get_config_options())
-
-  def set_filtering(self, is_filtered):
-    """
-    Sets if configuration options are filtered or not.
-
-    Arguments:
-      is_filtered - if true then only relatively important options will be
-                   shown, otherwise everything is shown
-    """
-
-    self.show_all = not is_filtered
-
-  def set_sort_order(self, ordering = None):
-    """
-    Sets the configuration attributes we're sorting by and resorts the
-    contents.
-
-    Arguments:
-      ordering - new ordering, if undefined then this resorts with the last
-                 set ordering
-    """
-
-    self.vals_lock.acquire()
-
-    if ordering:
-      CONFIG["features.config.order"] = ordering
-
-    self.conf_contents.sort(key=lambda i: (i.get_all(CONFIG["features.config.order"])))
-    self.conf_important_contents.sort(key=lambda i: (i.get_all(CONFIG["features.config.order"])))
-    self.vals_lock.release()
-
-  def show_sort_dialog(self):
-    """
-    Provides the sort dialog for our configuration options.
-    """
-
-    # set ordering for config options
-
-    title_label = "Config Option Ordering:"
-    options = [FIELD_ATTR[field][0] for field in Field]
-    old_selection = [FIELD_ATTR[field][0] for field in CONFIG["features.config.order"]]
-    option_colors = dict([FIELD_ATTR[field] for field in Field])
-    results = popups.show_sort_dialog(title_label, options, old_selection, option_colors)
-
-    if results:
-      # converts labels back to enums
-      result_enums = [get_field_from_label(label) for label in results]
-      self.set_sort_order(result_enums)
-
-  def handle_key(self, key):
-    self.vals_lock.acquire()
-    is_keystroke_consumed = True
-
-    if uiTools.is_scroll_key(key):
-      page_height = self.get_preferred_size()[0] - 1
-      detail_panel_height = CONFIG["features.config.selectionDetails.height"]
-
-      if detail_panel_height > 0 and detail_panel_height + 2 <= page_height:
-        page_height -= (detail_panel_height + 1)
-
-      is_changed = self.scroller.handle_key(key, self._get_config_options(), page_height)
-
-      if is_changed:
-        self.redraw(True)
-    elif uiTools.is_selection_key(key) and self._get_config_options():
-      # Prompts the user to edit the selected configuration value. The
-      # interface is locked to prevent updates between setting the value
-      # and showing any errors.
-
-      panel.CURSES_LOCK.acquire()
-
-      try:
-        selection = self.get_selection()
-        config_option = selection.get(Field.OPTION)
-
-        if selection.is_unset():
-          initial_value = ""
-        else:
-          initial_value = selection.get(Field.VALUE)
-
-        prompt_msg = "%s Value (esc to cancel): " % config_option
-        is_prepopulated = CONFIG["features.config.prepopulateEditValues"]
-        new_value = popups.input_prompt(prompt_msg, initial_value if is_prepopulated else "")
-
-        if new_value is not None and new_value != initial_value:
-          try:
-            if selection.get(Field.TYPE) == "Boolean":
-              # if the value's a boolean then allow for 'true' and 'false' inputs
-
-              if new_value.lower() == "true":
-                new_value = "1"
-              elif new_value.lower() == "false":
-                new_value = "0"
-            elif selection.get(Field.TYPE) == "LineList":
-              # set_option accepts list inputs when there's multiple values
-              new_value = new_value.split(",")
-
-            torTools.get_conn().set_option(config_option, new_value)
-
-            # forces the label to be remade with the new value
-
-            selection.label_cache = None
-
-            # resets the is_default flag
-
-            custom_options = torConfig.get_custom_options()
-            selection.fields[Field.IS_DEFAULT] = not config_option in custom_options
-
-            self.redraw(True)
-          except Exception as exc:
-            popups.show_msg("%s (press any key)" % exc)
-      finally:
-        panel.CURSES_LOCK.release()
-    elif key == ord('a') or key == ord('A'):
-      self.show_all = not self.show_all
-      self.redraw(True)
-    elif key == ord('s') or key == ord('S'):
-      self.show_sort_dialog()
-    elif key == ord('v') or key == ord('V'):
-      self.show_write_dialog()
-    else:
-      is_keystroke_consumed = False
-
-    self.vals_lock.release()
-    return is_keystroke_consumed
-
-  def show_write_dialog(self):
-    """
-    Provides an interface to confirm if the configuration is saved and, if so,
-    where.
-    """
-
-    # display a popup for saving the current configuration
-
-    config_lines = torConfig.get_custom_options(True)
-    popup, width, height = popups.init(len(config_lines) + 2)
-
-    if not popup:
-      return
-
-    try:
-      # displayed options (truncating the labels if there's limited room)
-
-      if width >= 30:
-        selection_options = ("Save", "Save As...", "Cancel")
-      else:
-        selection_options = ("Save", "Save As", "X")
-
-      # checks if we can show options beside the last line of visible content
-
-      is_option_line_separate = False
-      last_index = min(height - 2, len(config_lines) - 1)
-
-      # if we don't have room to display the selection options and room to
-      # grow then display the selection options on its own line
-
-      if width < (30 + len(config_lines[last_index])):
-        popup.set_height(height + 1)
-        popup.redraw(True)  # recreates the window instance
-        new_height, _ = popup.get_preferred_size()
-
-        if new_height > height:
-          height = new_height
-          is_option_line_separate = True
-
-      key, selection = 0, 2
-
-      while not uiTools.is_selection_key(key):
-        # if the popup has been resized then recreate it (needed for the
-        # proper border height)
-
-        new_height, new_width = popup.get_preferred_size()
-
-        if (height, width) != (new_height, new_width):
-          height, width = new_height, new_width
-          popup.redraw(True)
-
-        # if there isn't room to display the popup then cancel it
-
-        if height <= 2:
-          selection = 2
-          break
-
-        popup.win.erase()
-        popup.win.box()
-        popup.addstr(0, 0, "Configuration being saved:", curses.A_STANDOUT)
-
-        visible_config_lines = height - 3 if is_option_line_separate else height - 2
-
-        for i in range(visible_config_lines):
-          line = uiTools.crop_str(config_lines[i], width - 2)
-
-          if " " in line:
-            option, arg = line.split(" ", 1)
-            popup.addstr(i + 1, 1, option, curses.A_BOLD | uiTools.get_color("green"))
-            popup.addstr(i + 1, len(option) + 2, arg, curses.A_BOLD | uiTools.get_color("cyan"))
-          else:
-            popup.addstr(i + 1, 1, line, curses.A_BOLD | uiTools.get_color("green"))
-
-        # draws selection options (drawn right to left)
-
-        draw_x = width - 1
-
-        for i in range(len(selection_options) - 1, -1, -1):
-          option_label = selection_options[i]
-          draw_x -= (len(option_label) + 2)
-
-          # if we've run out of room then drop the option (this will only
-          # occur on tiny displays)
-
-          if draw_x < 1:
-            break
-
-          selection_format = curses.A_STANDOUT if i == selection else curses.A_NORMAL
-          popup.addstr(height - 2, draw_x, "[")
-          popup.addstr(height - 2, draw_x + 1, option_label, selection_format | curses.A_BOLD)
-          popup.addstr(height - 2, draw_x + len(option_label) + 1, "]")
-
-          draw_x -= 1  # space gap between the options
-
-        popup.win.refresh()
-
-        key = arm.controller.get_controller().get_screen().getch()
-
-        if key == curses.KEY_LEFT:
-          selection = max(0, selection - 1)
-        elif key == curses.KEY_RIGHT:
-          selection = min(len(selection_options) - 1, selection + 1)
-
-      if selection in (0, 1):
-        loaded_torrc, prompt_canceled = torConfig.get_torrc(), False
-
-        try:
-          config_location = loaded_torrc.get_config_location()
-        except IOError:
-          config_location = ""
-
-        if selection == 1:
-          # prompts user for a configuration location
-          config_location = popups.input_prompt("Save to (esc to cancel): ", config_location)
-
-          if not config_location:
-            prompt_canceled = True
-
-        if not prompt_canceled:
-          try:
-            torConfig.save_conf(config_location, config_lines)
-            msg = "Saved configuration to %s" % config_location
-          except IOError as exc:
-            msg = "Unable to save configuration (%s)" % exc.strerror
-
-          popups.show_msg(msg, 2)
-    finally:
-      popups.finalize()
-
-  def get_help(self):
-    options = []
-    options.append(("up arrow", "scroll up a line", None))
-    options.append(("down arrow", "scroll down a line", None))
-    options.append(("page up", "scroll up a page", None))
-    options.append(("page down", "scroll down a page", None))
-    options.append(("enter", "edit configuration option", None))
-    options.append(("v", "save configuration", None))
-    options.append(("a", "toggle option filtering", None))
-    options.append(("s", "sort ordering", None))
-    return options
-
-  def draw(self, width, height):
-    self.vals_lock.acquire()
-
-    # panel with details for the current selection
-
-    detail_panel_height = CONFIG["features.config.selectionDetails.height"]
-    is_scrollbar_visible = False
-
-    if detail_panel_height == 0 or detail_panel_height + 2 >= height:
-      # no detail panel
-
-      detail_panel_height = 0
-      scroll_location = self.scroller.get_scroll_location(self._get_config_options(), height - 1)
-      cursor_selection = self.get_selection()
-      is_scrollbar_visible = len(self._get_config_options()) > height - 1
-    else:
-      # Shrink detail panel if there isn't sufficient room for the whole
-      # thing. The extra line is for the bottom border.
-
-      detail_panel_height = min(height - 1, detail_panel_height + 1)
-      scroll_location = self.scroller.get_scroll_location(self._get_config_options(), height - 1 - detail_panel_height)
-      cursor_selection = self.get_selection()
-      is_scrollbar_visible = len(self._get_config_options()) > height - detail_panel_height - 1
-
-      if cursor_selection is not None:
-        self._draw_selection_panel(cursor_selection, width, detail_panel_height, is_scrollbar_visible)
-
-    # draws the top label
-
-    if self.is_title_visible():
-      config_type = "Tor" if self.config_type == State.TOR else "Arm"
-      hidden_msg = "press 'a' to hide most options" if self.show_all else "press 'a' to show all options"
-      title_label = "%s Configuration (%s):" % (config_type, hidden_msg)
-      self.addstr(0, 0, title_label, curses.A_STANDOUT)
-
-    # draws left-hand scroll bar if content's longer than the height
-
-    scroll_offset = 1
-
-    if is_scrollbar_visible:
-      scroll_offset = 3
-      self.add_scroll_bar(scroll_location, scroll_location + height - detail_panel_height - 1, len(self._get_config_options()), 1 + detail_panel_height)
-
-    option_width = CONFIG["features.config.state.colWidth.option"]
-    value_width = CONFIG["features.config.state.colWidth.value"]
-    description_width = max(0, width - scroll_offset - option_width - value_width - 2)
-
-    # if the description column is overly long then use its space for the
-    # value instead
-
-    if description_width > 80:
-      value_width += description_width - 80
-      description_width = 80
-
-    for line_number in range(scroll_location, len(self._get_config_options())):
-      entry = self._get_config_options()[line_number]
-      draw_line = line_number + detail_panel_height + 1 - scroll_location
-
-      line_format = curses.A_NORMAL if entry.get(Field.IS_DEFAULT) else curses.A_BOLD
-
-      if entry.get(Field.CATEGORY):
-        line_format |= uiTools.get_color(CATEGORY_COLOR[entry.get(Field.CATEGORY)])
-
-      if entry == cursor_selection:
-        line_format |= curses.A_STANDOUT
-
-      line_text = entry.get_label(option_width, value_width, description_width)
-      self.addstr(draw_line, scroll_offset, line_text, line_format)
-
-      if draw_line >= height:
-        break
-
-    self.vals_lock.release()
-
-  def _get_config_options(self):
-    return self.conf_contents if self.show_all else self.conf_important_contents
-
-  def _draw_selection_panel(self, selection, width, detail_panel_height, is_scrollbar_visible):
-    """
-    Renders a panel for the selected configuration option.
-    """
-
-    # This is a solid border unless the scrollbar is visible, in which case a
-    # 'T' pipe connects the border to the bar.
-
-    uiTools.draw_box(self, 0, 0, width, detail_panel_height + 1)
-
-    if is_scrollbar_visible:
-      self.addch(detail_panel_height, 1, curses.ACS_TTEE)
-
-    selection_format = curses.A_BOLD | uiTools.get_color(CATEGORY_COLOR[selection.get(Field.CATEGORY)])
-
-    # first entry:
-    # <option> (<category> Option)
-
-    option_label = " (%s Option)" % selection.get(Field.CATEGORY)
-    self.addstr(1, 2, selection.get(Field.OPTION) + option_label, selection_format)
-
-    # second entry:
-    # Value: <value> ([default|custom], <type>, usage: <argument usage>)
-
-    if detail_panel_height >= 3:
-      value_attr = []
-      value_attr.append("default" if selection.get(Field.IS_DEFAULT) else "custom")
-      value_attr.append(selection.get(Field.TYPE))
-      value_attr.append("usage: %s" % (selection.get(Field.ARG_USAGE)))
-      value_attr_label = ", ".join(value_attr)
-
-      value_label_width = width - 12 - len(value_attr_label)
-      value_label = uiTools.crop_str(selection.get(Field.VALUE), value_label_width)
-
-      self.addstr(2, 2, "Value: %s (%s)" % (value_label, value_attr_label), selection_format)
-
-    # remainder is filled with the man page description
-
-    description_height = max(0, detail_panel_height - 3)
-    description_content = "Description: " + selection.get(Field.DESCRIPTION)
-
-    for i in range(description_height):
-      # checks if we're done writing the description
-
-      if not description_content:
-        break
-
-      # there's a leading indent after the first line
-
-      if i > 0:
-        description_content = "  " + description_content
-
-      # we only want to work with content up until the next newline
-
-      if "\n" in description_content:
-        line_content, description_content = description_content.split("\n", 1)
-      else:
-        line_content, description_content = description_content, ""
-
-      if i != description_height - 1:
-        # there are more lines to display
-
-        msg, remainder = uiTools.crop_str(line_content, width - 3, 4, 4, uiTools.Ending.HYPHEN, True)
-        description_content = remainder.strip() + description_content
-      else:
-        # this is the last line, end it with an ellipsis
-
-        msg = uiTools.crop_str(line_content, width - 3, 4, 4)
-
-      self.addstr(3 + i, 2, msg, selection_format)
diff --git a/arm/config_panel.py b/arm/config_panel.py
new file mode 100644
index 0000000..c2c2ee5
--- /dev/null
+++ b/arm/config_panel.py
@@ -0,0 +1,726 @@
+"""
+Panel presenting the configuration state for tor or arm. Options can be edited
+and the resulting configuration files saved.
+"""
+
+import curses
+import threading
+
+import arm.controller
+import popups
+
+from arm.util import panel, tor_config, tor_tools, ui_tools
+
+import stem.control
+
+from stem.util import conf, enum, str_tools
+
+# TODO: The arm use cases are incomplete since they currently can't be
+# modified, have their descriptions fetched, or even get a complete listing
+# of what's available.
+
+State = enum.Enum("TOR", "ARM")  # state to be presented
+
+# mappings of option categories to the color for their entries
+
+CATEGORY_COLOR = {
+  tor_config.Category.GENERAL: "green",
+  tor_config.Category.CLIENT: "blue",
+  tor_config.Category.RELAY: "yellow",
+  tor_config.Category.DIRECTORY: "magenta",
+  tor_config.Category.AUTHORITY: "red",
+  tor_config.Category.HIDDEN_SERVICE: "cyan",
+  tor_config.Category.TESTING: "white",
+  tor_config.Category.UNKNOWN: "white",
+}
+
+# attributes of a ConfigEntry
+
+Field = enum.Enum(
+  "CATEGORY",
+  "OPTION",
+  "VALUE",
+  "TYPE",
+  "ARG_USAGE",
+  "SUMMARY",
+  "DESCRIPTION",
+  "MAN_ENTRY",
+  "IS_DEFAULT",
+)
+
+FIELD_ATTR = {
+  Field.CATEGORY: ("Category", "red"),
+  Field.OPTION: ("Option Name", "blue"),
+  Field.VALUE: ("Value", "cyan"),
+  Field.TYPE: ("Arg Type", "green"),
+  Field.ARG_USAGE: ("Arg Usage", "yellow"),
+  Field.SUMMARY: ("Summary", "green"),
+  Field.DESCRIPTION: ("Description", "white"),
+  Field.MAN_ENTRY: ("Man Page Entry", "blue"),
+  Field.IS_DEFAULT: ("Is Default", "magenta"),
+}
+
+
+def conf_handler(key, value):
+  if key == "features.config.selectionDetails.height":
+    return max(0, value)
+  elif key == "features.config.state.colWidth.option":
+    return max(5, value)
+  elif key == "features.config.state.colWidth.value":
+    return max(5, value)
+  elif key == "features.config.order":
+    return conf.parse_enum_csv(key, value[0], Field, 3)
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.config.order": [Field.MAN_ENTRY, Field.OPTION, Field.IS_DEFAULT],
+  "features.config.selectionDetails.height": 6,
+  "features.config.prepopulateEditValues": True,
+  "features.config.state.showPrivateOptions": False,
+  "features.config.state.showVirtualOptions": False,
+  "features.config.state.colWidth.option": 25,
+  "features.config.state.colWidth.value": 15,
+}, conf_handler)
+
+
+def get_field_from_label(field_label):
+  """
+  Converts field labels back to their enumeration, raising a ValueError if it
+  doesn't exist.
+  """
+
+  for entry_enum in FIELD_ATTR:
+    if field_label == FIELD_ATTR[entry_enum][0]:
+      return entry_enum
+
+
+class ConfigEntry():
+  """
+  Configuration option in the panel.
+  """
+
+  def __init__(self, option, type, is_default):
+    self.fields = {}
+    self.fields[Field.OPTION] = option
+    self.fields[Field.TYPE] = type
+    self.fields[Field.IS_DEFAULT] = is_default
+
+    # Fetches extra information from external sources (the arm config and tor
+    # man page). These are None if unavailable for this config option.
+
+    summary = tor_config.get_config_summary(option)
+    man_entry = tor_config.get_config_description(option)
+
+    if man_entry:
+      self.fields[Field.MAN_ENTRY] = man_entry.index
+      self.fields[Field.CATEGORY] = man_entry.category
+      self.fields[Field.ARG_USAGE] = man_entry.arg_usage
+      self.fields[Field.DESCRIPTION] = man_entry.description
+    else:
+      self.fields[Field.MAN_ENTRY] = 99999  # sorts non-man entries last
+      self.fields[Field.CATEGORY] = tor_config.Category.UNKNOWN
+      self.fields[Field.ARG_USAGE] = ""
+      self.fields[Field.DESCRIPTION] = ""
+
+    # uses the full man page description if a summary is unavailable
+
+    self.fields[Field.SUMMARY] = summary if summary is not None else self.fields[Field.DESCRIPTION]
+
+    # cache of what's displayed for this configuration option
+
+    self.label_cache = None
+    self.label_cache_args = None
+
+  def get(self, field):
+    """
+    Provides back the value in the given field.
+
+    Arguments:
+      field - enum for the field to be provided back
+    """
+
+    if field == Field.VALUE:
+      return self._get_value()
+    else:
+      return self.fields[field]
+
+  def get_all(self, fields):
+    """
+    Provides back a list with the given field values.
+
+    Arguments:
+      field - enums for the fields to be provided back
+    """
+
+    return [self.get(field) for field in fields]
+
+  def get_label(self, option_width, value_width, summary_width):
+    """
+    Provides display string of the configuration entry with the given
+    constraints on the width of the contents.
+
+    Arguments:
+      option_width  - width of the option column
+      value_width   - width of the value column
+      summary_width - width of the summary column
+    """
+
+    # Fetching the display entries is very common so this caches the values.
+    # Doing this substantially drops cpu usage when scrolling (by around 40%).
+
+    arg_set = (option_width, value_width, summary_width)
+
+    if not self.label_cache or self.label_cache_args != arg_set:
+      option_label = ui_tools.crop_str(self.get(Field.OPTION), option_width)
+      value_label = ui_tools.crop_str(self.get(Field.VALUE), value_width)
+      summary_label = ui_tools.crop_str(self.get(Field.SUMMARY), summary_width, None)
+      line_text_layout = "%%-%is %%-%is %%-%is" % (option_width, value_width, summary_width)
+      self.label_cache = line_text_layout % (option_label, value_label, summary_label)
+      self.label_cache_args = arg_set
+
+    return self.label_cache
+
+  def is_unset(self):
+    """
+    True if we have no value, false otherwise.
+    """
+
+    conf_value = tor_tools.get_conn().get_option(self.get(Field.OPTION), [], True)
+
+    return not bool(conf_value)
+
+  def _get_value(self):
+    """
+    Provides the current value of the configuration entry, taking advantage of
+    the tor_tools caching to effectively query the accurate value. This uses the
+    value's type to provide a user friendly representation if able.
+    """
+
+    conf_value = ", ".join(tor_tools.get_conn().get_option(self.get(Field.OPTION), [], True))
+
+    # provides nicer values for recognized types
+
+    if not conf_value:
+      conf_value = "<none>"
+    elif self.get(Field.TYPE) == "Boolean" and conf_value in ("0", "1"):
+      conf_value = "False" if conf_value == "0" else "True"
+    elif self.get(Field.TYPE) == "DataSize" and conf_value.isdigit():
+      conf_value = str_tools.get_size_label(int(conf_value))
+    elif self.get(Field.TYPE) == "TimeInterval" and conf_value.isdigit():
+      conf_value = str_tools.get_time_label(int(conf_value), is_long = True)
+
+    return conf_value
+
+
+class ConfigPanel(panel.Panel):
+  """
+  Renders a listing of the tor or arm configuration state, allowing options to
+  be selected and edited.
+  """
+
+  def __init__(self, stdscr, config_type):
+    panel.Panel.__init__(self, stdscr, "configuration", 0)
+
+    self.config_type = config_type
+    self.conf_contents = []
+    self.conf_important_contents = []
+    self.scroller = ui_tools.Scroller(True)
+    self.vals_lock = threading.RLock()
+
+    # shows all configuration options if true, otherwise only the ones with
+    # the 'important' flag are shown
+
+    self.show_all = False
+
+    # initializes config contents if we're connected
+
+    conn = tor_tools.get_conn()
+    conn.add_status_listener(self.reset_listener)
+
+    if conn.is_alive():
+      self.reset_listener(None, stem.control.State.INIT, None)
+
+  def reset_listener(self, controller, event_type, _):
+    # fetches configuration options if a new instance, otherwise keeps our
+    # current contents
+
+    if event_type == stem.control.State.INIT:
+      self._load_config_options()
+
+  def _load_config_options(self):
+    """
+    Fetches the configuration options available from tor or arm.
+    """
+
+    self.conf_contents = []
+    self.conf_important_contents = []
+
+    if self.config_type == State.TOR:
+      conn, config_option_lines = tor_tools.get_conn(), []
+      custom_options = tor_config.get_custom_options()
+      config_option_query = conn.get_info("config/names", None)
+
+      if config_option_query:
+        config_option_lines = config_option_query.strip().split("\n")
+
+      for line in config_option_lines:
+        # lines are of the form "<option> <type>[ <documentation>]", like:
+        # UseEntryGuards Boolean
+        # documentation is apparently only in older versions (for instance,
+        # 0.2.1.25)
+
+        line_comp = line.strip().split(" ")
+        conf_option, conf_type = line_comp[0], line_comp[1]
+
+        # skips private and virtual entries if not configured to show them
+
+        if not CONFIG["features.config.state.showPrivateOptions"] and conf_option.startswith("__"):
+          continue
+        elif not CONFIG["features.config.state.showVirtualOptions"] and conf_type == "Virtual":
+          continue
+
+        self.conf_contents.append(ConfigEntry(conf_option, conf_type, not conf_option in custom_options))
+
+    elif self.config_type == State.ARM:
+      # loaded via the conf utility
+
+      arm_config = conf.get_config("arm")
+
+      for key in arm_config.keys():
+        pass  # TODO: implement
+
+    # mirror listing with only the important configuration options
+
+    self.conf_important_contents = []
+
+    for entry in self.conf_contents:
+      if tor_config.is_important(entry.get(Field.OPTION)):
+        self.conf_important_contents.append(entry)
+
+    # if there aren't any important options then show everything
+
+    if not self.conf_important_contents:
+      self.conf_important_contents = self.conf_contents
+
+    self.set_sort_order()  # initial sorting of the contents
+
+  def get_selection(self):
+    """
+    Provides the currently selected entry.
+    """
+
+    return self.scroller.get_cursor_selection(self._get_config_options())
+
+  def set_filtering(self, is_filtered):
+    """
+    Sets if configuration options are filtered or not.
+
+    Arguments:
+      is_filtered - if true then only relatively important options will be
+                   shown, otherwise everything is shown
+    """
+
+    self.show_all = not is_filtered
+
+  def set_sort_order(self, ordering = None):
+    """
+    Sets the configuration attributes we're sorting by and resorts the
+    contents.
+
+    Arguments:
+      ordering - new ordering, if undefined then this resorts with the last
+                 set ordering
+    """
+
+    self.vals_lock.acquire()
+
+    if ordering:
+      CONFIG["features.config.order"] = ordering
+
+    self.conf_contents.sort(key=lambda i: (i.get_all(CONFIG["features.config.order"])))
+    self.conf_important_contents.sort(key=lambda i: (i.get_all(CONFIG["features.config.order"])))
+    self.vals_lock.release()
+
+  def show_sort_dialog(self):
+    """
+    Provides the sort dialog for our configuration options.
+    """
+
+    # set ordering for config options
+
+    title_label = "Config Option Ordering:"
+    options = [FIELD_ATTR[field][0] for field in Field]
+    old_selection = [FIELD_ATTR[field][0] for field in CONFIG["features.config.order"]]
+    option_colors = dict([FIELD_ATTR[field] for field in Field])
+    results = popups.show_sort_dialog(title_label, options, old_selection, option_colors)
+
+    if results:
+      # converts labels back to enums
+      result_enums = [get_field_from_label(label) for label in results]
+      self.set_sort_order(result_enums)
+
+  def handle_key(self, key):
+    self.vals_lock.acquire()
+    is_keystroke_consumed = True
+
+    if ui_tools.is_scroll_key(key):
+      page_height = self.get_preferred_size()[0] - 1
+      detail_panel_height = CONFIG["features.config.selectionDetails.height"]
+
+      if detail_panel_height > 0 and detail_panel_height + 2 <= page_height:
+        page_height -= (detail_panel_height + 1)
+
+      is_changed = self.scroller.handle_key(key, self._get_config_options(), page_height)
+
+      if is_changed:
+        self.redraw(True)
+    elif ui_tools.is_selection_key(key) and self._get_config_options():
+      # Prompts the user to edit the selected configuration value. The
+      # interface is locked to prevent updates between setting the value
+      # and showing any errors.
+
+      panel.CURSES_LOCK.acquire()
+
+      try:
+        selection = self.get_selection()
+        config_option = selection.get(Field.OPTION)
+
+        if selection.is_unset():
+          initial_value = ""
+        else:
+          initial_value = selection.get(Field.VALUE)
+
+        prompt_msg = "%s Value (esc to cancel): " % config_option
+        is_prepopulated = CONFIG["features.config.prepopulateEditValues"]
+        new_value = popups.input_prompt(prompt_msg, initial_value if is_prepopulated else "")
+
+        if new_value is not None and new_value != initial_value:
+          try:
+            if selection.get(Field.TYPE) == "Boolean":
+              # if the value's a boolean then allow for 'true' and 'false' inputs
+
+              if new_value.lower() == "true":
+                new_value = "1"
+              elif new_value.lower() == "false":
+                new_value = "0"
+            elif selection.get(Field.TYPE) == "LineList":
+              # set_option accepts list inputs when there's multiple values
+              new_value = new_value.split(",")
+
+            tor_tools.get_conn().set_option(config_option, new_value)
+
+            # forces the label to be remade with the new value
+
+            selection.label_cache = None
+
+            # resets the is_default flag
+
+            custom_options = tor_config.get_custom_options()
+            selection.fields[Field.IS_DEFAULT] = not config_option in custom_options
+
+            self.redraw(True)
+          except Exception as exc:
+            popups.show_msg("%s (press any key)" % exc)
+      finally:
+        panel.CURSES_LOCK.release()
+    elif key == ord('a') or key == ord('A'):
+      self.show_all = not self.show_all
+      self.redraw(True)
+    elif key == ord('s') or key == ord('S'):
+      self.show_sort_dialog()
+    elif key == ord('v') or key == ord('V'):
+      self.show_write_dialog()
+    else:
+      is_keystroke_consumed = False
+
+    self.vals_lock.release()
+    return is_keystroke_consumed
+
+  def show_write_dialog(self):
+    """
+    Provides an interface to confirm if the configuration is saved and, if so,
+    where.
+    """
+
+    # display a popup for saving the current configuration
+
+    config_lines = tor_config.get_custom_options(True)
+    popup, width, height = popups.init(len(config_lines) + 2)
+
+    if not popup:
+      return
+
+    try:
+      # displayed options (truncating the labels if there's limited room)
+
+      if width >= 30:
+        selection_options = ("Save", "Save As...", "Cancel")
+      else:
+        selection_options = ("Save", "Save As", "X")
+
+      # checks if we can show options beside the last line of visible content
+
+      is_option_line_separate = False
+      last_index = min(height - 2, len(config_lines) - 1)
+
+      # if we don't have room to display the selection options and room to
+      # grow then display the selection options on its own line
+
+      if width < (30 + len(config_lines[last_index])):
+        popup.set_height(height + 1)
+        popup.redraw(True)  # recreates the window instance
+        new_height, _ = popup.get_preferred_size()
+
+        if new_height > height:
+          height = new_height
+          is_option_line_separate = True
+
+      key, selection = 0, 2
+
+      while not ui_tools.is_selection_key(key):
+        # if the popup has been resized then recreate it (needed for the
+        # proper border height)
+
+        new_height, new_width = popup.get_preferred_size()
+
+        if (height, width) != (new_height, new_width):
+          height, width = new_height, new_width
+          popup.redraw(True)
+
+        # if there isn't room to display the popup then cancel it
+
+        if height <= 2:
+          selection = 2
+          break
+
+        popup.win.erase()
+        popup.win.box()
+        popup.addstr(0, 0, "Configuration being saved:", curses.A_STANDOUT)
+
+        visible_config_lines = height - 3 if is_option_line_separate else height - 2
+
+        for i in range(visible_config_lines):
+          line = ui_tools.crop_str(config_lines[i], width - 2)
+
+          if " " in line:
+            option, arg = line.split(" ", 1)
+            popup.addstr(i + 1, 1, option, curses.A_BOLD | ui_tools.get_color("green"))
+            popup.addstr(i + 1, len(option) + 2, arg, curses.A_BOLD | ui_tools.get_color("cyan"))
+          else:
+            popup.addstr(i + 1, 1, line, curses.A_BOLD | ui_tools.get_color("green"))
+
+        # draws selection options (drawn right to left)
+
+        draw_x = width - 1
+
+        for i in range(len(selection_options) - 1, -1, -1):
+          option_label = selection_options[i]
+          draw_x -= (len(option_label) + 2)
+
+          # if we've run out of room then drop the option (this will only
+          # occur on tiny displays)
+
+          if draw_x < 1:
+            break
+
+          selection_format = curses.A_STANDOUT if i == selection else curses.A_NORMAL
+          popup.addstr(height - 2, draw_x, "[")
+          popup.addstr(height - 2, draw_x + 1, option_label, selection_format | curses.A_BOLD)
+          popup.addstr(height - 2, draw_x + len(option_label) + 1, "]")
+
+          draw_x -= 1  # space gap between the options
+
+        popup.win.refresh()
+
+        key = arm.controller.get_controller().get_screen().getch()
+
+        if key == curses.KEY_LEFT:
+          selection = max(0, selection - 1)
+        elif key == curses.KEY_RIGHT:
+          selection = min(len(selection_options) - 1, selection + 1)
+
+      if selection in (0, 1):
+        loaded_torrc, prompt_canceled = tor_config.get_torrc(), False
+
+        try:
+          config_location = loaded_torrc.get_config_location()
+        except IOError:
+          config_location = ""
+
+        if selection == 1:
+          # prompts user for a configuration location
+          config_location = popups.input_prompt("Save to (esc to cancel): ", config_location)
+
+          if not config_location:
+            prompt_canceled = True
+
+        if not prompt_canceled:
+          try:
+            tor_config.save_conf(config_location, config_lines)
+            msg = "Saved configuration to %s" % config_location
+          except IOError as exc:
+            msg = "Unable to save configuration (%s)" % exc.strerror
+
+          popups.show_msg(msg, 2)
+    finally:
+      popups.finalize()
+
+  def get_help(self):
+    options = []
+    options.append(("up arrow", "scroll up a line", None))
+    options.append(("down arrow", "scroll down a line", None))
+    options.append(("page up", "scroll up a page", None))
+    options.append(("page down", "scroll down a page", None))
+    options.append(("enter", "edit configuration option", None))
+    options.append(("v", "save configuration", None))
+    options.append(("a", "toggle option filtering", None))
+    options.append(("s", "sort ordering", None))
+    return options
+
+  def draw(self, width, height):
+    self.vals_lock.acquire()
+
+    # panel with details for the current selection
+
+    detail_panel_height = CONFIG["features.config.selectionDetails.height"]
+    is_scrollbar_visible = False
+
+    if detail_panel_height == 0 or detail_panel_height + 2 >= height:
+      # no detail panel
+
+      detail_panel_height = 0
+      scroll_location = self.scroller.get_scroll_location(self._get_config_options(), height - 1)
+      cursor_selection = self.get_selection()
+      is_scrollbar_visible = len(self._get_config_options()) > height - 1
+    else:
+      # Shrink detail panel if there isn't sufficient room for the whole
+      # thing. The extra line is for the bottom border.
+
+      detail_panel_height = min(height - 1, detail_panel_height + 1)
+      scroll_location = self.scroller.get_scroll_location(self._get_config_options(), height - 1 - detail_panel_height)
+      cursor_selection = self.get_selection()
+      is_scrollbar_visible = len(self._get_config_options()) > height - detail_panel_height - 1
+
+      if cursor_selection is not None:
+        self._draw_selection_panel(cursor_selection, width, detail_panel_height, is_scrollbar_visible)
+
+    # draws the top label
+
+    if self.is_title_visible():
+      config_type = "Tor" if self.config_type == State.TOR else "Arm"
+      hidden_msg = "press 'a' to hide most options" if self.show_all else "press 'a' to show all options"
+      title_label = "%s Configuration (%s):" % (config_type, hidden_msg)
+      self.addstr(0, 0, title_label, curses.A_STANDOUT)
+
+    # draws left-hand scroll bar if content's longer than the height
+
+    scroll_offset = 1
+
+    if is_scrollbar_visible:
+      scroll_offset = 3
+      self.add_scroll_bar(scroll_location, scroll_location + height - detail_panel_height - 1, len(self._get_config_options()), 1 + detail_panel_height)
+
+    option_width = CONFIG["features.config.state.colWidth.option"]
+    value_width = CONFIG["features.config.state.colWidth.value"]
+    description_width = max(0, width - scroll_offset - option_width - value_width - 2)
+
+    # if the description column is overly long then use its space for the
+    # value instead
+
+    if description_width > 80:
+      value_width += description_width - 80
+      description_width = 80
+
+    for line_number in range(scroll_location, len(self._get_config_options())):
+      entry = self._get_config_options()[line_number]
+      draw_line = line_number + detail_panel_height + 1 - scroll_location
+
+      line_format = curses.A_NORMAL if entry.get(Field.IS_DEFAULT) else curses.A_BOLD
+
+      if entry.get(Field.CATEGORY):
+        line_format |= ui_tools.get_color(CATEGORY_COLOR[entry.get(Field.CATEGORY)])
+
+      if entry == cursor_selection:
+        line_format |= curses.A_STANDOUT
+
+      line_text = entry.get_label(option_width, value_width, description_width)
+      self.addstr(draw_line, scroll_offset, line_text, line_format)
+
+      if draw_line >= height:
+        break
+
+    self.vals_lock.release()
+
+  def _get_config_options(self):
+    return self.conf_contents if self.show_all else self.conf_important_contents
+
+  def _draw_selection_panel(self, selection, width, detail_panel_height, is_scrollbar_visible):
+    """
+    Renders a panel for the selected configuration option.
+    """
+
+    # This is a solid border unless the scrollbar is visible, in which case a
+    # 'T' pipe connects the border to the bar.
+
+    ui_tools.draw_box(self, 0, 0, width, detail_panel_height + 1)
+
+    if is_scrollbar_visible:
+      self.addch(detail_panel_height, 1, curses.ACS_TTEE)
+
+    selection_format = curses.A_BOLD | ui_tools.get_color(CATEGORY_COLOR[selection.get(Field.CATEGORY)])
+
+    # first entry:
+    # <option> (<category> Option)
+
+    option_label = " (%s Option)" % selection.get(Field.CATEGORY)
+    self.addstr(1, 2, selection.get(Field.OPTION) + option_label, selection_format)
+
+    # second entry:
+    # Value: <value> ([default|custom], <type>, usage: <argument usage>)
+
+    if detail_panel_height >= 3:
+      value_attr = []
+      value_attr.append("default" if selection.get(Field.IS_DEFAULT) else "custom")
+      value_attr.append(selection.get(Field.TYPE))
+      value_attr.append("usage: %s" % (selection.get(Field.ARG_USAGE)))
+      value_attr_label = ", ".join(value_attr)
+
+      value_label_width = width - 12 - len(value_attr_label)
+      value_label = ui_tools.crop_str(selection.get(Field.VALUE), value_label_width)
+
+      self.addstr(2, 2, "Value: %s (%s)" % (value_label, value_attr_label), selection_format)
+
+    # remainder is filled with the man page description
+
+    description_height = max(0, detail_panel_height - 3)
+    description_content = "Description: " + selection.get(Field.DESCRIPTION)
+
+    for i in range(description_height):
+      # checks if we're done writing the description
+
+      if not description_content:
+        break
+
+      # there's a leading indent after the first line
+
+      if i > 0:
+        description_content = "  " + description_content
+
+      # we only want to work with content up until the next newline
+
+      if "\n" in description_content:
+        line_content, description_content = description_content.split("\n", 1)
+      else:
+        line_content, description_content = description_content, ""
+
+      if i != description_height - 1:
+        # there are more lines to display
+
+        msg, remainder = ui_tools.crop_str(line_content, width - 3, 4, 4, ui_tools.Ending.HYPHEN, True)
+        description_content = remainder.strip() + description_content
+      else:
+        # this is the last line, end it with an ellipsis
+
+        msg = ui_tools.crop_str(line_content, width - 3, 4, 4)
+
+      self.addstr(3 + i, 2, msg, selection_format)
diff --git a/arm/connections/__init__.py b/arm/connections/__init__.py
index 8e0444a..bf4cf89 100644
--- a/arm/connections/__init__.py
+++ b/arm/connections/__init__.py
@@ -2,4 +2,4 @@
 Connection panel related resources.
 """
 
-__all__ = ["circEntry", "connEntry", "conn_panel", "countPopup", "descriptorPopup", "entries"]
+__all__ = ["circ_entry", "conn_entry", "conn_panel", "count_popup", "descriptor_popup", "entries"]
diff --git a/arm/connections/circEntry.py b/arm/connections/circEntry.py
deleted file mode 100644
index bed6528..0000000
--- a/arm/connections/circEntry.py
+++ /dev/null
@@ -1,216 +0,0 @@
-"""
-Connection panel entries for client circuits. This includes a header entry
-followed by an entry for each hop in the circuit. For instance:
-
-89.188.20.246:42667    -->  217.172.182.26 (de)       General / Built     8.6m (CIRCUIT)
-|  85.8.28.4 (se)               98FBC3B2B93897A78CDD797EF549E6B62C9A8523    1 / Guard
-|  91.121.204.76 (fr)           546387D93F8D40CFF8842BB9D3A8EC477CEDA984    2 / Middle
-+- 217.172.182.26 (de)          5CFA9EA136C0EA0AC096E5CEA7EB674F1207CF86    3 / Exit
-"""
-
-import curses
-
-from arm.connections import entries, connEntry
-from arm.util import torTools, uiTools
-
-
-class CircEntry(connEntry.ConnectionEntry):
-  def __init__(self, circuit_id, status, purpose, path):
-    connEntry.ConnectionEntry.__init__(self, "127.0.0.1", "0", "127.0.0.1", "0")
-
-    self.circuit_id = circuit_id
-    self.status = status
-
-    # drops to lowercase except the first letter
-
-    if len(purpose) >= 2:
-      purpose = purpose[0].upper() + purpose[1:].lower()
-
-    self.lines = [CircHeaderLine(self.circuit_id, purpose)]
-
-    # Overwrites attributes of the initial line to make it more fitting as the
-    # header for our listing.
-
-    self.lines[0].base_type = connEntry.Category.CIRCUIT
-
-    self.update(status, path)
-
-  def update(self, status, path):
-    """
-    Our status and path can change over time if the circuit is still in the
-    process of being built. Updates these attributes of our relay.
-
-    Arguments:
-      status - new status of the circuit
-      path   - list of fingerprints for the series of relays involved in the
-               circuit
-    """
-
-    self.status = status
-    self.lines = [self.lines[0]]
-    conn = torTools.get_conn()
-
-    if status == "BUILT" and not self.lines[0].is_built:
-      exit_ip, exit_port = conn.get_relay_address(path[-1], ("192.168.0.1", "0"))
-      self.lines[0].set_exit(exit_ip, exit_port, path[-1])
-
-    for i in range(len(path)):
-      relay_fingerprint = path[i]
-      relay_ip, relay_port = conn.get_relay_address(relay_fingerprint, ("192.168.0.1", "0"))
-
-      if i == len(path) - 1:
-        if status == "BUILT":
-          placement_type = "Exit"
-        else:
-          placement_type = "Extending"
-      elif i == 0:
-        placement_type = "Guard"
-      else:
-        placement_type = "Middle"
-
-      placement_label = "%i / %s" % (i + 1, placement_type)
-
-      self.lines.append(CircLine(relay_ip, relay_port, relay_fingerprint, placement_label))
-
-    self.lines[-1].is_last = True
-
-
-class CircHeaderLine(connEntry.ConnectionLine):
-  """
-  Initial line of a client entry. This has the same basic format as connection
-  lines except that its etc field has circuit attributes.
-  """
-
-  def __init__(self, circuit_id, purpose):
-    connEntry.ConnectionLine.__init__(self, "127.0.0.1", "0", "0.0.0.0", "0", False, False)
-    self.circuit_id = circuit_id
-    self.purpose = purpose
-    self.is_built = False
-
-  def set_exit(self, exit_address, exit_port, exit_fingerprint):
-    connEntry.ConnectionLine.__init__(self, "127.0.0.1", "0", exit_address, exit_port, False, False)
-    self.is_built = True
-    self.foreign.fingerprint_overwrite = exit_fingerprint
-
-  def get_type(self):
-    return connEntry.Category.CIRCUIT
-
-  def get_destination_label(self, max_length, include_locale=False, include_hostname=False):
-    if not self.is_built:
-      return "Building..."
-
-    return connEntry.ConnectionLine.get_destination_label(self, max_length, include_locale, include_hostname)
-
-  def get_etc_content(self, width, listing_type):
-    """
-    Attempts to provide all circuit related stats. Anything that can't be
-    shown completely (not enough room) is dropped.
-    """
-
-    etc_attr = ["Purpose: %s" % self.purpose, "Circuit ID: %i" % self.circuit_id]
-
-    for i in range(len(etc_attr), -1, -1):
-      etc_label = ", ".join(etc_attr[:i])
-
-      if len(etc_label) <= width:
-        return ("%%-%is" % width) % etc_label
-
-    return ""
-
-  def get_details(self, width):
-    if not self.is_built:
-      detail_format = curses.A_BOLD | uiTools.get_color(connEntry.CATEGORY_COLOR[self.get_type()])
-      return [("Building Circuit...", detail_format)]
-    else:
-      return connEntry.ConnectionLine.get_details(self, width)
-
-
-class CircLine(connEntry.ConnectionLine):
-  """
-  An individual hop in a circuit. This overwrites the displayed listing, but
-  otherwise makes use of the ConnectionLine attributes (for the detail display,
-  caching, etc).
-  """
-
-  def __init__(self, remote_address, remote_port, remote_fingerprint, placement_label):
-    connEntry.ConnectionLine.__init__(self, "127.0.0.1", "0", remote_address, remote_port)
-    self.foreign.fingerprint_overwrite = remote_fingerprint
-    self.placement_label = placement_label
-    self.include_port = False
-
-    # determines the sort of left hand bracketing we use
-
-    self.is_last = False
-
-  def get_type(self):
-    return connEntry.Category.CIRCUIT
-
-  def get_listing_prefix(self):
-    if self.is_last:
-      return (ord(' '), curses.ACS_LLCORNER, curses.ACS_HLINE, ord(' '))
-    else:
-      return (ord(' '), curses.ACS_VLINE, ord(' '), ord(' '))
-
-  def get_listing_entry(self, width, current_time, listing_type):
-    """
-    Provides the [(msg, attr)...] listing for this relay in the circuit
-    listing. Lines are composed of the following components:
-      <bracket> <dst> <etc> <placement label>
-
-    The dst and etc entries largely match their ConnectionEntry counterparts.
-
-    Arguments:
-      width       - maximum length of the line
-      current_time - the current unix time (ignored)
-      listing_type - primary attribute we're listing connections by
-    """
-
-    return entries.ConnectionPanelLine.get_listing_entry(self, width, current_time, listing_type)
-
-  def _get_listing_entry(self, width, current_time, listing_type):
-    line_format = uiTools.get_color(connEntry.CATEGORY_COLOR[self.get_type()])
-
-    # The required widths are the sum of the following:
-    # initial space (1 character)
-    # bracketing (3 characters)
-    # placement_label (14 characters)
-    # gap between etc and placement label (5 characters)
-
-    baseline_space = 14 + 5
-
-    dst, etc = "", ""
-
-    if listing_type == entries.ListingType.IP_ADDRESS:
-      # TODO: include hostname when that's available
-      # dst width is derived as:
-      # src (21) + dst (26) + divider (7) + right gap (2) - bracket (3) = 53 char
-
-      dst = "%-53s" % self.get_destination_label(53, include_locale = True)
-
-      # fills the nickname into the empty space here
-
-      dst = "%s%-25s   " % (dst[:25], uiTools.crop_str(self.foreign.get_nickname(), 25, 0))
-
-      etc = self.get_etc_content(width - baseline_space - len(dst), listing_type)
-    elif listing_type == entries.ListingType.HOSTNAME:
-      # min space for the hostname is 40 characters
-
-      etc = self.get_etc_content(width - baseline_space - 40, listing_type)
-      dst_layout = "%%-%is" % (width - baseline_space - len(etc))
-      dst = dst_layout % self.foreign.get_hostname(self.foreign.get_address())
-    elif listing_type == entries.ListingType.FINGERPRINT:
-      # dst width is derived as:
-      # src (9) + dst (40) + divider (7) + right gap (2) - bracket (3) = 55 char
-
-      dst = "%-55s" % self.foreign.get_fingerprint()
-      etc = self.get_etc_content(width - baseline_space - len(dst), listing_type)
-    else:
-      # min space for the nickname is 56 characters
-
-      etc = self.get_etc_content(width - baseline_space - 56, listing_type)
-      dst_layout = "%%-%is" % (width - baseline_space - len(etc))
-      dst = dst_layout % self.foreign.get_nickname()
-
-    return ((dst + etc, line_format),
-            (" " * (width - baseline_space - len(dst) - len(etc) + 5), line_format),
-            ("%-14s" % self.placement_label, line_format))
diff --git a/arm/connections/circ_entry.py b/arm/connections/circ_entry.py
new file mode 100644
index 0000000..cc6b418
--- /dev/null
+++ b/arm/connections/circ_entry.py
@@ -0,0 +1,216 @@
+"""
+Connection panel entries for client circuits. This includes a header entry
+followed by an entry for each hop in the circuit. For instance:
+
+89.188.20.246:42667    -->  217.172.182.26 (de)       General / Built     8.6m (CIRCUIT)
+|  85.8.28.4 (se)               98FBC3B2B93897A78CDD797EF549E6B62C9A8523    1 / Guard
+|  91.121.204.76 (fr)           546387D93F8D40CFF8842BB9D3A8EC477CEDA984    2 / Middle
++- 217.172.182.26 (de)          5CFA9EA136C0EA0AC096E5CEA7EB674F1207CF86    3 / Exit
+"""
+
+import curses
+
+from arm.connections import entries, conn_entry
+from arm.util import tor_tools, ui_tools
+
+
+class CircEntry(conn_entry.ConnectionEntry):
+  def __init__(self, circuit_id, status, purpose, path):
+    conn_entry.ConnectionEntry.__init__(self, "127.0.0.1", "0", "127.0.0.1", "0")
+
+    self.circuit_id = circuit_id
+    self.status = status
+
+    # drops to lowercase except the first letter
+
+    if len(purpose) >= 2:
+      purpose = purpose[0].upper() + purpose[1:].lower()
+
+    self.lines = [CircHeaderLine(self.circuit_id, purpose)]
+
+    # Overwrites attributes of the initial line to make it more fitting as the
+    # header for our listing.
+
+    self.lines[0].base_type = conn_entry.Category.CIRCUIT
+
+    self.update(status, path)
+
+  def update(self, status, path):
+    """
+    Our status and path can change over time if the circuit is still in the
+    process of being built. Updates these attributes of our relay.
+
+    Arguments:
+      status - new status of the circuit
+      path   - list of fingerprints for the series of relays involved in the
+               circuit
+    """
+
+    self.status = status
+    self.lines = [self.lines[0]]
+    conn = tor_tools.get_conn()
+
+    if status == "BUILT" and not self.lines[0].is_built:
+      exit_ip, exit_port = conn.get_relay_address(path[-1], ("192.168.0.1", "0"))
+      self.lines[0].set_exit(exit_ip, exit_port, path[-1])
+
+    for i in range(len(path)):
+      relay_fingerprint = path[i]
+      relay_ip, relay_port = conn.get_relay_address(relay_fingerprint, ("192.168.0.1", "0"))
+
+      if i == len(path) - 1:
+        if status == "BUILT":
+          placement_type = "Exit"
+        else:
+          placement_type = "Extending"
+      elif i == 0:
+        placement_type = "Guard"
+      else:
+        placement_type = "Middle"
+
+      placement_label = "%i / %s" % (i + 1, placement_type)
+
+      self.lines.append(CircLine(relay_ip, relay_port, relay_fingerprint, placement_label))
+
+    self.lines[-1].is_last = True
+
+
+class CircHeaderLine(conn_entry.ConnectionLine):
+  """
+  Initial line of a client entry. This has the same basic format as connection
+  lines except that its etc field has circuit attributes.
+  """
+
+  def __init__(self, circuit_id, purpose):
+    conn_entry.ConnectionLine.__init__(self, "127.0.0.1", "0", "0.0.0.0", "0", False, False)
+    self.circuit_id = circuit_id
+    self.purpose = purpose
+    self.is_built = False
+
+  def set_exit(self, exit_address, exit_port, exit_fingerprint):
+    conn_entry.ConnectionLine.__init__(self, "127.0.0.1", "0", exit_address, exit_port, False, False)
+    self.is_built = True
+    self.foreign.fingerprint_overwrite = exit_fingerprint
+
+  def get_type(self):
+    return conn_entry.Category.CIRCUIT
+
+  def get_destination_label(self, max_length, include_locale=False, include_hostname=False):
+    if not self.is_built:
+      return "Building..."
+
+    return conn_entry.ConnectionLine.get_destination_label(self, max_length, include_locale, include_hostname)
+
+  def get_etc_content(self, width, listing_type):
+    """
+    Attempts to provide all circuit related stats. Anything that can't be
+    shown completely (not enough room) is dropped.
+    """
+
+    etc_attr = ["Purpose: %s" % self.purpose, "Circuit ID: %i" % self.circuit_id]
+
+    for i in range(len(etc_attr), -1, -1):
+      etc_label = ", ".join(etc_attr[:i])
+
+      if len(etc_label) <= width:
+        return ("%%-%is" % width) % etc_label
+
+    return ""
+
+  def get_details(self, width):
+    if not self.is_built:
+      detail_format = curses.A_BOLD | ui_tools.get_color(conn_entry.CATEGORY_COLOR[self.get_type()])
+      return [("Building Circuit...", detail_format)]
+    else:
+      return conn_entry.ConnectionLine.get_details(self, width)
+
+
+class CircLine(conn_entry.ConnectionLine):
+  """
+  An individual hop in a circuit. This overwrites the displayed listing, but
+  otherwise makes use of the ConnectionLine attributes (for the detail display,
+  caching, etc).
+  """
+
+  def __init__(self, remote_address, remote_port, remote_fingerprint, placement_label):
+    conn_entry.ConnectionLine.__init__(self, "127.0.0.1", "0", remote_address, remote_port)
+    self.foreign.fingerprint_overwrite = remote_fingerprint
+    self.placement_label = placement_label
+    self.include_port = False
+
+    # determines the sort of left hand bracketing we use
+
+    self.is_last = False
+
+  def get_type(self):
+    return conn_entry.Category.CIRCUIT
+
+  def get_listing_prefix(self):
+    if self.is_last:
+      return (ord(' '), curses.ACS_LLCORNER, curses.ACS_HLINE, ord(' '))
+    else:
+      return (ord(' '), curses.ACS_VLINE, ord(' '), ord(' '))
+
+  def get_listing_entry(self, width, current_time, listing_type):
+    """
+    Provides the [(msg, attr)...] listing for this relay in the circuit
+    listing. Lines are composed of the following components:
+      <bracket> <dst> <etc> <placement label>
+
+    The dst and etc entries largely match their ConnectionEntry counterparts.
+
+    Arguments:
+      width       - maximum length of the line
+      current_time - the current unix time (ignored)
+      listing_type - primary attribute we're listing connections by
+    """
+
+    return entries.ConnectionPanelLine.get_listing_entry(self, width, current_time, listing_type)
+
+  def _get_listing_entry(self, width, current_time, listing_type):
+    line_format = ui_tools.get_color(conn_entry.CATEGORY_COLOR[self.get_type()])
+
+    # The required widths are the sum of the following:
+    # initial space (1 character)
+    # bracketing (3 characters)
+    # placement_label (14 characters)
+    # gap between etc and placement label (5 characters)
+
+    baseline_space = 14 + 5
+
+    dst, etc = "", ""
+
+    if listing_type == entries.ListingType.IP_ADDRESS:
+      # TODO: include hostname when that's available
+      # dst width is derived as:
+      # src (21) + dst (26) + divider (7) + right gap (2) - bracket (3) = 53 char
+
+      dst = "%-53s" % self.get_destination_label(53, include_locale = True)
+
+      # fills the nickname into the empty space here
+
+      dst = "%s%-25s   " % (dst[:25], ui_tools.crop_str(self.foreign.get_nickname(), 25, 0))
+
+      etc = self.get_etc_content(width - baseline_space - len(dst), listing_type)
+    elif listing_type == entries.ListingType.HOSTNAME:
+      # min space for the hostname is 40 characters
+
+      etc = self.get_etc_content(width - baseline_space - 40, listing_type)
+      dst_layout = "%%-%is" % (width - baseline_space - len(etc))
+      dst = dst_layout % self.foreign.get_hostname(self.foreign.get_address())
+    elif listing_type == entries.ListingType.FINGERPRINT:
+      # dst width is derived as:
+      # src (9) + dst (40) + divider (7) + right gap (2) - bracket (3) = 55 char
+
+      dst = "%-55s" % self.foreign.get_fingerprint()
+      etc = self.get_etc_content(width - baseline_space - len(dst), listing_type)
+    else:
+      # min space for the nickname is 56 characters
+
+      etc = self.get_etc_content(width - baseline_space - 56, listing_type)
+      dst_layout = "%%-%is" % (width - baseline_space - len(etc))
+      dst = dst_layout % self.foreign.get_nickname()
+
+    return ((dst + etc, line_format),
+            (" " * (width - baseline_space - len(dst) - len(etc) + 5), line_format),
+            ("%-14s" % self.placement_label, line_format))
diff --git a/arm/connections/connEntry.py b/arm/connections/connEntry.py
deleted file mode 100644
index 230ca7f..0000000
--- a/arm/connections/connEntry.py
+++ /dev/null
@@ -1,957 +0,0 @@
-"""
-Connection panel entries related to actual connections to or from the system
-(ie, results seen by netstat, lsof, etc).
-"""
-
-import time
-import curses
-
-from arm.util import torTools, uiTools
-from arm.connections import entries
-
-from stem.util import conf, connection, enum, str_tools
-
-# Connection Categories:
-#   Inbound      Relay connection, coming to us.
-#   Outbound     Relay connection, leaving us.
-#   Exit         Outbound relay connection leaving the Tor network.
-#   Hidden       Connections to a hidden service we're providing.
-#   Socks        Socks connections for applications using Tor.
-#   Circuit      Circuits our tor client has created.
-#   Directory    Fetching tor consensus information.
-#   Control      Tor controller (arm, vidalia, etc).
-
-Category = enum.Enum("INBOUND", "OUTBOUND", "EXIT", "HIDDEN", "SOCKS", "CIRCUIT", "DIRECTORY", "CONTROL")
-
-CATEGORY_COLOR = {
-  Category.INBOUND: "green",
-  Category.OUTBOUND: "blue",
-  Category.EXIT: "red",
-  Category.HIDDEN: "magenta",
-  Category.SOCKS: "yellow",
-  Category.CIRCUIT: "cyan",
-  Category.DIRECTORY: "magenta",
-  Category.CONTROL: "red",
-}
-
-# static data for listing format
-# <src>  -->  <dst>  <etc><padding>
-
-LABEL_FORMAT = "%s  -->  %s  %s%s"
-LABEL_MIN_PADDING = 2  # min space between listing label and following data
-
-# sort value for scrubbed ip addresses
-
-SCRUBBED_IP_VAL = 255 ** 4
-
-CONFIG = conf.config_dict("arm", {
-  "features.connection.markInitialConnections": True,
-  "features.connection.showIps": True,
-  "features.connection.showExitPort": True,
-  "features.connection.showColumn.fingerprint": True,
-  "features.connection.showColumn.nickname": True,
-  "features.connection.showColumn.destination": True,
-  "features.connection.showColumn.expandedIp": True,
-})
-
-
-class Endpoint:
-  """
-  Collection of attributes associated with a connection endpoint. This is a
-  thin wrapper for torUtil functions, making use of its caching for
-  performance.
-  """
-
-  def __init__(self, address, port):
-    self.address = address
-    self.port = port
-
-    # if true, we treat the port as definitely not being an ORPort when
-    # searching for matching fingerprints (otherwise we use it to possibly
-    # narrow results when unknown)
-
-    self.is_not_or_port = True
-
-    # if set then this overwrites fingerprint lookups
-
-    self.fingerprint_overwrite = None
-
-  def get_address(self):
-    """
-    Provides the IP address of the endpoint.
-    """
-
-    return self.address
-
-  def get_port(self):
-    """
-    Provides the port of the endpoint.
-    """
-
-    return self.port
-
-  def get_hostname(self, default = None):
-    """
-    Provides the hostname associated with the relay's address. This is a
-    non-blocking call and returns None if the address either can't be resolved
-    or hasn't been resolved yet.
-
-    Arguments:
-      default - return value if no hostname is available
-    """
-
-    # TODO: skipping all hostname resolution to be safe for now
-    #try:
-    #  myHostname = hostnames.resolve(self.address)
-    #except:
-    #  # either a ValueError or IOError depending on the source of the lookup failure
-    #  myHostname = None
-    #
-    #if not myHostname: return default
-    #else: return myHostname
-
-    return default
-
-  def get_locale(self, default=None):
-    """
-    Provides the two letter country code for the IP address' locale.
-
-    Arguments:
-      default - return value if no locale information is available
-    """
-
-    conn = torTools.get_conn()
-    return conn.get_info("ip-to-country/%s" % self.address, default)
-
-  def get_fingerprint(self):
-    """
-    Provides the fingerprint of the relay, returning "UNKNOWN" if it can't be
-    determined.
-    """
-
-    if self.fingerprint_overwrite:
-      return self.fingerprint_overwrite
-
-    conn = torTools.get_conn()
-    my_fingerprint = conn.get_relay_fingerprint(self.address)
-
-    # If there were multiple matches and our port is likely the ORPort then
-    # try again with that to narrow the results.
-
-    if not my_fingerprint and not self.is_not_or_port:
-      my_fingerprint = conn.get_relay_fingerprint(self.address, self.port)
-
-    if my_fingerprint:
-      return my_fingerprint
-    else:
-      return "UNKNOWN"
-
-  def get_nickname(self):
-    """
-    Provides the nickname of the relay, returning "UNKNOWN" if it can't be
-    determined.
-    """
-
-    my_fingerprint = self.get_fingerprint()
-
-    if my_fingerprint != "UNKNOWN":
-      conn = torTools.get_conn()
-      my_nickname = conn.get_relay_nickname(my_fingerprint)
-
-      if my_nickname:
-        return my_nickname
-      else:
-        return "UNKNOWN"
-    else:
-      return "UNKNOWN"
-
-
-class ConnectionEntry(entries.ConnectionPanelEntry):
-  """
-  Represents a connection being made to or from this system. These only
-  concern real connections so it includes the inbound, outbound, directory,
-  application, and controller categories.
-  """
-
-  def __init__(self, local_address, local_port, remote_address, remote_port):
-    entries.ConnectionPanelEntry.__init__(self)
-    self.lines = [ConnectionLine(local_address, local_port, remote_address, remote_port)]
-
-  def get_sort_value(self, attr, listing_type):
-    """
-    Provides the value of a single attribute used for sorting purposes.
-    """
-
-    connection_line = self.lines[0]
-
-    if attr == entries.SortAttr.IP_ADDRESS:
-      if connection_line.is_private():
-        return SCRUBBED_IP_VAL  # orders at the end
-
-      return connection_line.sort_address
-    elif attr == entries.SortAttr.PORT:
-      return connection_line.sort_port
-    elif attr == entries.SortAttr.HOSTNAME:
-      if connection_line.is_private():
-        return ""
-
-      return connection_line.foreign.get_hostname("")
-    elif attr == entries.SortAttr.FINGERPRINT:
-      return connection_line.foreign.get_fingerprint()
-    elif attr == entries.SortAttr.NICKNAME:
-      my_nickname = connection_line.foreign.get_nickname()
-
-      if my_nickname == "UNKNOWN":
-        return "z" * 20  # orders at the end
-      else:
-        return my_nickname.lower()
-    elif attr == entries.SortAttr.CATEGORY:
-      return Category.index_of(connection_line.get_type())
-    elif attr == entries.SortAttr.UPTIME:
-      return connection_line.start_time
-    elif attr == entries.SortAttr.COUNTRY:
-      if connection.is_private_address(self.lines[0].foreign.get_address()):
-        return ""
-      else:
-        return connection_line.foreign.get_locale("")
-    else:
-      return entries.ConnectionPanelEntry.get_sort_value(self, attr, listing_type)
-
-
-class ConnectionLine(entries.ConnectionPanelLine):
-  """
-  Display component of the ConnectionEntry.
-  """
-
-  def __init__(self, local_address, local_port, remote_address, remote_port, include_port=True, include_expanded_addresses=True):
-    entries.ConnectionPanelLine.__init__(self)
-
-    self.local = Endpoint(local_address, local_port)
-    self.foreign = Endpoint(remote_address, remote_port)
-    self.start_time = time.time()
-    self.is_initial_connection = False
-
-    # overwrite the local fingerprint with ours
-
-    conn = torTools.get_conn()
-    self.local.fingerprint_overwrite = conn.get_info("fingerprint", None)
-
-    # True if the connection has matched the properties of a client/directory
-    # connection every time we've checked. The criteria we check is...
-    #   client    - first hop in an established circuit
-    #   directory - matches an established single-hop circuit (probably a
-    #               directory mirror)
-
-    self._possible_client = True
-    self._possible_directory = True
-
-    # attributes for SOCKS, HIDDEN, and CONTROL connections
-
-    self.application_name = None
-    self.application_pid = None
-    self.is_application_resolving = False
-
-    my_or_port = conn.get_option("ORPort", None)
-    my_dir_port = conn.get_option("DirPort", None)
-    my_socks_port = conn.get_option("SocksPort", "9050")
-    my_ctl_port = conn.get_option("ControlPort", None)
-    my_hidden_service_ports = conn.get_hidden_service_ports()
-
-    # the ORListenAddress can overwrite the ORPort
-
-    listen_addr = conn.get_option("ORListenAddress", None)
-
-    if listen_addr and ":" in listen_addr:
-      my_or_port = listen_addr[listen_addr.find(":") + 1:]
-
-    if local_port in (my_or_port, my_dir_port):
-      self.base_type = Category.INBOUND
-      self.local.is_not_or_port = False
-    elif local_port == my_socks_port:
-      self.base_type = Category.SOCKS
-    elif remote_port in my_hidden_service_ports:
-      self.base_type = Category.HIDDEN
-    elif local_port == my_ctl_port:
-      self.base_type = Category.CONTROL
-    else:
-      self.base_type = Category.OUTBOUND
-      self.foreign.is_not_or_port = False
-
-    self.cached_type = None
-
-    # includes the port or expanded ip address field when displaying listing
-    # information if true
-
-    self.include_port = include_port
-    self.include_expanded_addresses = include_expanded_addresses
-
-    # cached immutable values used for sorting
-
-    ip_value = 0
-
-    for comp in self.foreign.get_address().split("."):
-      ip_value *= 255
-      ip_value += int(comp)
-
-    self.sort_address = ip_value
-    self.sort_port = int(self.foreign.get_port())
-
-  def get_listing_entry(self, width, current_time, listing_type):
-    """
-    Provides the tuple list for this connection's listing. Lines are composed
-    of the following components:
-      <src>  -->  <dst>     <etc>     <uptime> (<type>)
-
-    ListingType.IP_ADDRESS:
-      src - <internal addr:port> --> <external addr:port>
-      dst - <destination addr:port>
-      etc - <fingerprint> <nickname>
-
-    ListingType.HOSTNAME:
-      src - localhost:<port>
-      dst - <destination hostname:port>
-      etc - <destination addr:port> <fingerprint> <nickname>
-
-    ListingType.FINGERPRINT:
-      src - localhost
-      dst - <destination fingerprint>
-      etc - <nickname> <destination addr:port>
-
-    ListingType.NICKNAME:
-      src - <source nickname>
-      dst - <destination nickname>
-      etc - <fingerprint> <destination addr:port>
-
-    Arguments:
-      width       - maximum length of the line
-      current_time - unix timestamp for what the results should consider to be
-                    the current time
-      listing_type - primary attribute we're listing connections by
-    """
-
-    # fetch our (most likely cached) display entry for the listing
-
-    my_listing = entries.ConnectionPanelLine.get_listing_entry(self, width, current_time, listing_type)
-
-    # fill in the current uptime and return the results
-
-    if CONFIG["features.connection.markInitialConnections"]:
-      time_prefix = "+" if self.is_initial_connection else " "
-    else:
-      time_prefix = ""
-
-    time_label = time_prefix + "%5s" % str_tools.get_time_label(current_time - self.start_time, 1)
-    my_listing[2] = (time_label, my_listing[2][1])
-
-    return my_listing
-
-  def is_unresolved_application(self):
-    """
-    True if our display uses application information that hasn't yet been resolved.
-    """
-
-    return self.application_name is None and self.get_type() in (Category.SOCKS, Category.HIDDEN, Category.CONTROL)
-
-  def _get_listing_entry(self, width, current_time, listing_type):
-    entry_type = self.get_type()
-
-    # Lines are split into the following components in reverse:
-    # init gap - " "
-    # content  - "<src>  -->  <dst>     <etc>     "
-    # time     - "<uptime>"
-    # preType  - " ("
-    # category - "<type>"
-    # postType - ")   "
-
-    line_format = uiTools.get_color(CATEGORY_COLOR[entry_type])
-    time_width = 6 if CONFIG["features.connection.markInitialConnections"] else 5
-
-    draw_entry = [(" ", line_format),
-                  (self._get_listing_content(width - (12 + time_width) - 1, listing_type), line_format),
-                  (" " * time_width, line_format),
-                  (" (", line_format),
-                  (entry_type.upper(), line_format | curses.A_BOLD),
-                  (")" + " " * (9 - len(entry_type)), line_format)]
-
-    return draw_entry
-
-  def _get_details(self, width):
-    """
-    Provides details on the connection, correlated against available consensus
-    data.
-
-    Arguments:
-      width - available space to display in
-    """
-
-    detail_format = curses.A_BOLD | uiTools.get_color(CATEGORY_COLOR[self.get_type()])
-    return [(line, detail_format) for line in self._get_detail_content(width)]
-
-  def reset_display(self):
-    entries.ConnectionPanelLine.reset_display(self)
-    self.cached_type = None
-
-  def is_private(self):
-    """
-    Returns true if the endpoint is private, possibly belonging to a client
-    connection or exit traffic.
-    """
-
-    if not CONFIG["features.connection.showIps"]:
-      return True
-
-    # This is used to scrub private information from the interface. Relaying
-    # etiquette (and wiretapping laws) say these are bad things to look at so
-    # DON'T CHANGE THIS UNLESS YOU HAVE A DAMN GOOD REASON!
-
-    my_type = self.get_type()
-
-    if my_type == Category.INBOUND:
-      # if we're a guard or bridge and the connection doesn't belong to a
-      # known relay then it might be client traffic
-
-      conn = torTools.get_conn()
-
-      if "Guard" in conn.get_my_flags([]) or conn.get_option("BridgeRelay", None) == "1":
-        all_matches = conn.get_relay_fingerprint(self.foreign.get_address(), get_all_matches = True)
-
-        return all_matches == []
-    elif my_type == Category.EXIT:
-      # DNS connections exiting us aren't private (since they're hitting our
-      # resolvers). Everything else, however, is.
-
-      # TODO: Ideally this would also double check that it's a UDP connection
-      # (since DNS is the only UDP connections Tor will relay), however this
-      # will take a bit more work to propagate the information up from the
-      # connection resolver.
-
-      return self.foreign.get_port() != "53"
-
-    # for everything else this isn't a concern
-
-    return False
-
-  def get_type(self):
-    """
-    Provides our best guess at the current type of the connection. This
-    depends on consensus results, our current client circuits, etc. Results
-    are cached until this entry's display is reset.
-    """
-
-    # caches both to simplify the calls and to keep the type consistent until
-    # we want to reflect changes
-
-    if not self.cached_type:
-      if self.base_type == Category.OUTBOUND:
-        # Currently the only non-static categories are OUTBOUND vs...
-        # - EXIT since this depends on the current consensus
-        # - CIRCUIT if this is likely to belong to our guard usage
-        # - DIRECTORY if this is a single-hop circuit (directory mirror?)
-        #
-        # The exitability, circuits, and fingerprints are all cached by the
-        # torTools util keeping this a quick lookup.
-
-        conn = torTools.get_conn()
-        destination_fingerprint = self.foreign.get_fingerprint()
-
-        if destination_fingerprint == "UNKNOWN":
-          # Not a known relay. This might be an exit connection.
-
-          if conn.is_exiting_allowed(self.foreign.get_address(), self.foreign.get_port()):
-            self.cached_type = Category.EXIT
-        elif self._possible_client or self._possible_directory:
-          # This belongs to a known relay. If we haven't eliminated ourselves as
-          # a possible client or directory connection then check if it still
-          # holds true.
-
-          my_circuits = conn.get_circuits()
-
-          if self._possible_client:
-            # Checks that this belongs to the first hop in a circuit that's
-            # either unestablished or longer than a single hop (ie, anything but
-            # a built 1-hop connection since those are most likely a directory
-            # mirror).
-
-            for _, status, _, path in my_circuits:
-              if path and path[0] == destination_fingerprint and (status != "BUILT" or len(path) > 1):
-                self.cached_type = Category.CIRCUIT  # matched a probable guard connection
-
-            # if we fell through, we can eliminate ourselves as a guard in the future
-            if not self.cached_type:
-              self._possible_client = False
-
-          if self._possible_directory:
-            # Checks if we match a built, single hop circuit.
-
-            for _, status, _, path in my_circuits:
-              if path and path[0] == destination_fingerprint and status == "BUILT" and len(path) == 1:
-                self.cached_type = Category.DIRECTORY
-
-            # if we fell through, eliminate ourselves as a directory connection
-            if not self.cached_type:
-              self._possible_directory = False
-
-      if not self.cached_type:
-        self.cached_type = self.base_type
-
-    return self.cached_type
-
-  def get_etc_content(self, width, listing_type):
-    """
-    Provides the optional content for the connection.
-
-    Arguments:
-      width       - maximum length of the line
-      listing_type - primary attribute we're listing connections by
-    """
-
-    # for applications show the command/pid
-
-    if self.get_type() in (Category.SOCKS, Category.HIDDEN, Category.CONTROL):
-      display_label = ""
-
-      if self.application_name:
-        if self.application_pid:
-          display_label = "%s (%s)" % (self.application_name, self.application_pid)
-        else:
-          display_label = self.application_name
-      elif self.is_application_resolving:
-        display_label = "resolving..."
-      else:
-        display_label = "UNKNOWN"
-
-      if len(display_label) < width:
-        return ("%%-%is" % width) % display_label
-      else:
-        return ""
-
-    # for everything else display connection/consensus information
-
-    destination_address = self.get_destination_label(26, include_locale = True)
-    etc, used_space = "", 0
-
-    if listing_type == entries.ListingType.IP_ADDRESS:
-      if width > used_space + 42 and CONFIG["features.connection.showColumn.fingerprint"]:
-        # show fingerprint (column width: 42 characters)
-
-        etc += "%-40s  " % self.foreign.get_fingerprint()
-        used_space += 42
-
-      if width > used_space + 10 and CONFIG["features.connection.showColumn.nickname"]:
-        # show nickname (column width: remainder)
-
-        nickname_space = width - used_space
-        nickname_label = uiTools.crop_str(self.foreign.get_nickname(), nickname_space, 0)
-        etc += ("%%-%is  " % nickname_space) % nickname_label
-        used_space += nickname_space + 2
-    elif listing_type == entries.ListingType.HOSTNAME:
-      if width > used_space + 28 and CONFIG["features.connection.showColumn.destination"]:
-        # show destination ip/port/locale (column width: 28 characters)
-        etc += "%-26s  " % destination_address
-        used_space += 28
-
-      if width > used_space + 42 and CONFIG["features.connection.showColumn.fingerprint"]:
-        # show fingerprint (column width: 42 characters)
-        etc += "%-40s  " % self.foreign.get_fingerprint()
-        used_space += 42
-
-      if width > used_space + 17 and CONFIG["features.connection.showColumn.nickname"]:
-        # show nickname (column width: min 17 characters, uses half of the remainder)
-        nickname_space = 15 + (width - (used_space + 17)) / 2
-        nickname_label = uiTools.crop_str(self.foreign.get_nickname(), nickname_space, 0)
-        etc += ("%%-%is  " % nickname_space) % nickname_label
-        used_space += (nickname_space + 2)
-    elif listing_type == entries.ListingType.FINGERPRINT:
-      if width > used_space + 17:
-        # show nickname (column width: min 17 characters, consumes any remaining space)
-
-        nickname_space = width - used_space - 2
-
-        # if there's room then also show a column with the destination
-        # ip/port/locale (column width: 28 characters)
-
-        is_locale_included = width > used_space + 45
-        is_locale_included &= CONFIG["features.connection.showColumn.destination"]
-
-        if is_locale_included:
-          nickname_space -= 28
-
-        if CONFIG["features.connection.showColumn.nickname"]:
-          nickname_label = uiTools.crop_str(self.foreign.get_nickname(), nickname_space, 0)
-          etc += ("%%-%is  " % nickname_space) % nickname_label
-          used_space += nickname_space + 2
-
-        if is_locale_included:
-          etc += "%-26s  " % destination_address
-          used_space += 28
-    else:
-      if width > used_space + 42 and CONFIG["features.connection.showColumn.fingerprint"]:
-        # show fingerprint (column width: 42 characters)
-        etc += "%-40s  " % self.foreign.get_fingerprint()
-        used_space += 42
-
-      if width > used_space + 28 and CONFIG["features.connection.showColumn.destination"]:
-        # show destination ip/port/locale (column width: 28 characters)
-        etc += "%-26s  " % destination_address
-        used_space += 28
-
-    return ("%%-%is" % width) % etc
-
-  def _get_listing_content(self, width, listing_type):
-    """
-    Provides the source, destination, and extra info for our listing.
-
-    Arguments:
-      width       - maximum length of the line
-      listing_type - primary attribute we're listing connections by
-    """
-
-    conn = torTools.get_conn()
-    my_type = self.get_type()
-    destination_address = self.get_destination_label(26, include_locale = True)
-
-    # The required widths are the sum of the following:
-    # - room for LABEL_FORMAT and LABEL_MIN_PADDING (11 characters)
-    # - base data for the listing
-    # - that extra field plus any previous
-
-    used_space = len(LABEL_FORMAT % tuple([""] * 4)) + LABEL_MIN_PADDING
-    local_port = ":%s" % self.local.get_port() if self.include_port else ""
-
-    src, dst, etc = "", "", ""
-
-    if listing_type == entries.ListingType.IP_ADDRESS:
-      my_external_address = conn.get_info("address", self.local.get_address())
-      address_differ = my_external_address != self.local.get_address()
-
-      # Expanding doesn't make sense, if the connection isn't actually
-      # going through Tor's external IP address. As there isn't a known
-      # method for checking if it is, we're checking the type instead.
-      #
-      # This isn't entirely correct. It might be a better idea to check if
-      # the source and destination addresses are both private, but that might
-      # not be perfectly reliable either.
-
-      is_expansion_type = not my_type in (Category.SOCKS, Category.HIDDEN, Category.CONTROL)
-
-      if is_expansion_type:
-        src_address = my_external_address + local_port
-      else:
-        src_address = self.local.get_address() + local_port
-
-      if my_type in (Category.SOCKS, Category.CONTROL):
-        # Like inbound connections these need their source and destination to
-        # be swapped. However, this only applies when listing by IP or hostname
-        # (their fingerprint and nickname are both for us). Reversing the
-        # fields here to keep the same column alignments.
-
-        src = "%-21s" % destination_address
-        dst = "%-26s" % src_address
-      else:
-        src = "%-21s" % src_address  # ip:port = max of 21 characters
-        dst = "%-26s" % destination_address  # ip:port (xx) = max of 26 characters
-
-      used_space += len(src) + len(dst)  # base data requires 47 characters
-
-      # Showing the fingerprint (which has the width of 42) has priority over
-      # an expanded address field. Hence check if we either have space for
-      # both or wouldn't be showing the fingerprint regardless.
-
-      is_expanded_address_visible = width > used_space + 28
-
-      if is_expanded_address_visible and CONFIG["features.connection.showColumn.fingerprint"]:
-        is_expanded_address_visible = width < used_space + 42 or width > used_space + 70
-
-      if address_differ and is_expansion_type and is_expanded_address_visible and self.include_expanded_addresses and CONFIG["features.connection.showColumn.expandedIp"]:
-        # include the internal address in the src (extra 28 characters)
-
-        internal_address = self.local.get_address() + local_port
-
-        # If this is an inbound connection then reverse ordering so it's:
-        # <foreign> --> <external> --> <internal>
-        # when the src and dst are swapped later
-
-        if my_type == Category.INBOUND:
-          src = "%-21s  -->  %s" % (src, internal_address)
-        else:
-          src = "%-21s  -->  %s" % (internal_address, src)
-
-        used_space += 28
-
-      etc = self.get_etc_content(width - used_space, listing_type)
-      used_space += len(etc)
-    elif listing_type == entries.ListingType.HOSTNAME:
-      # 15 characters for source, and a min of 40 reserved for the destination
-      # TODO: when actually functional the src and dst need to be swapped for
-      # SOCKS and CONTROL connections
-
-      src = "localhost%-6s" % local_port
-      used_space += len(src)
-      min_hostname_space = 40
-
-      etc = self.get_etc_content(width - used_space - min_hostname_space, listing_type)
-      used_space += len(etc)
-
-      hostname_space = width - used_space
-      used_space = width  # prevents padding at the end
-
-      if self.is_private():
-        dst = ("%%-%is" % hostname_space) % "<scrubbed>"
-      else:
-        hostname = self.foreign.get_hostname(self.foreign.get_address())
-        port_label = ":%-5s" % self.foreign.get_port() if self.include_port else ""
-
-        # truncates long hostnames and sets dst to <hostname>:<port>
-
-        hostname = uiTools.crop_str(hostname, hostname_space, 0)
-        dst = ("%%-%is" % hostname_space) % (hostname + port_label)
-    elif listing_type == entries.ListingType.FINGERPRINT:
-      src = "localhost"
-
-      if my_type == Category.CONTROL:
-        dst = "localhost"
-      else:
-        dst = self.foreign.get_fingerprint()
-
-      dst = "%-40s" % dst
-
-      used_space += len(src) + len(dst)  # base data requires 49 characters
-
-      etc = self.get_etc_content(width - used_space, listing_type)
-      used_space += len(etc)
-    else:
-      # base data requires 50 min characters
-      src = self.local.get_nickname()
-
-      if my_type == Category.CONTROL:
-        dst = self.local.get_nickname()
-      else:
-        dst = self.foreign.get_nickname()
-
-      min_base_space = 50
-
-      etc = self.get_etc_content(width - used_space - min_base_space, listing_type)
-      used_space += len(etc)
-
-      base_space = width - used_space
-      used_space = width  # prevents padding at the end
-
-      if len(src) + len(dst) > base_space:
-        src = uiTools.crop_str(src, base_space / 3)
-        dst = uiTools.crop_str(dst, base_space - len(src))
-
-      # pads dst entry to its max space
-
-      dst = ("%%-%is" % (base_space - len(src))) % dst
-
-    if my_type == Category.INBOUND:
-      src, dst = dst, src
-
-    padding = " " * (width - used_space + LABEL_MIN_PADDING)
-
-    return LABEL_FORMAT % (src, dst, etc, padding)
-
-  def _get_detail_content(self, width):
-    """
-    Provides a list with detailed information for this connection.
-
-    Arguments:
-      width - max length of lines
-    """
-
-    lines = [""] * 7
-    lines[0] = "address: %s" % self.get_destination_label(width - 11)
-    lines[1] = "locale: %s" % ("??" if self.is_private() else self.foreign.get_locale("??"))
-
-    # Remaining data concerns the consensus results, with three possible cases:
-    # - if there's a single match then display its details
-    # - if there's multiple potential relays then list all of the combinations
-    #   of ORPorts / Fingerprints
-    # - if no consensus data is available then say so (probably a client or
-    #   exit connection)
-
-    fingerprint = self.foreign.get_fingerprint()
-    conn = torTools.get_conn()
-
-    if fingerprint != "UNKNOWN":
-      # single match - display information available about it
-
-      ns_entry = conn.get_consensus_entry(fingerprint)
-      desc_entry = conn.get_descriptor_entry(fingerprint)
-
-      # append the fingerprint to the second line
-
-      lines[1] = "%-13sfingerprint: %s" % (lines[1], fingerprint)
-
-      if ns_entry:
-        # example consensus entry:
-        # r murble R8sCM1ar1sS2GulQYFVmvN95xsk RJr6q+wkTFG+ng5v2bdCbVVFfA4 2011-02-21 00:25:32 195.43.157.85 443 0
-        # s Exit Fast Guard Named Running Stable Valid
-        # w Bandwidth=2540
-        # p accept 20-23,43,53,79-81,88,110,143,194,443
-
-        ns_lines = ns_entry.split("\n")
-
-        first_line_comp = ns_lines[0].split(" ")
-
-        if len(first_line_comp) >= 9:
-          _, nickname, _, _, published_date, published_time, _, or_port, dir_port = first_line_comp[:9]
-        else:
-          nickname, published_date, published_time, or_port, dir_port = "", "", "", "", ""
-
-        flags = "unknown"
-
-        if len(ns_lines) >= 2 and ns_lines[1].startswith("s "):
-          flags = ns_lines[1][2:]
-
-        exit_policy = conn.get_relay_exit_policy(fingerprint)
-
-        if exit_policy:
-          policy_label = exit_policy.summary()
-        else:
-          policy_label = "unknown"
-
-        dir_port_label = "" if dir_port == "0" else "dirport: %s" % dir_port
-        lines[2] = "nickname: %-25s orport: %-10s %s" % (nickname, or_port, dir_port_label)
-        lines[3] = "published: %s %s" % (published_time, published_date)
-        lines[4] = "flags: %s" % flags.replace(" ", ", ")
-        lines[5] = "exit policy: %s" % policy_label
-
-      if desc_entry:
-        tor_version, platform, contact = "", "", ""
-
-        for desc_line in desc_entry.split("\n"):
-          if desc_line.startswith("platform"):
-            # has the tor version and platform, ex:
-            # platform Tor 0.2.1.29 (r318f470bc5f2ad43) on Linux x86_64
-
-            tor_version = desc_line[13:desc_line.find(" ", 13)]
-            platform = desc_line[desc_line.rfind(" on ") + 4:]
-          elif desc_line.startswith("contact"):
-            contact = desc_line[8:]
-
-            # clears up some highly common obscuring
-
-            for alias in (" at ", " AT "):
-              contact = contact.replace(alias, "@")
-
-            for alias in (" dot ", " DOT "):
-              contact = contact.replace(alias, ".")
-
-            break  # contact lines come after the platform
-
-        lines[3] = "%-35s os: %-14s version: %s" % (lines[3], platform, tor_version)
-
-        # contact information is an optional field
-
-        if contact:
-          lines[6] = "contact: %s" % contact
-    else:
-      all_matches = conn.get_relay_fingerprint(self.foreign.get_address(), get_all_matches = True)
-
-      if all_matches:
-        # multiple matches
-        lines[2] = "Multiple matches, possible fingerprints are:"
-
-        for i in range(len(all_matches)):
-          is_last_line = i == 3
-
-          relay_port, relay_fingerprint = all_matches[i]
-          line_text = "%i. or port: %-5s fingerprint: %s" % (i, relay_port, relay_fingerprint)
-
-          # if there's multiple lines remaining at the end then give a count
-
-          remaining_relays = len(all_matches) - i
-
-          if is_last_line and remaining_relays > 1:
-            line_text = "... %i more" % remaining_relays
-
-          lines[3 + i] = line_text
-
-          if is_last_line:
-            break
-      else:
-        # no consensus entry for this ip address
-        lines[2] = "No consensus data found"
-
-    # crops any lines that are too long
-
-    for i in range(len(lines)):
-      lines[i] = uiTools.crop_str(lines[i], width - 2)
-
-    return lines
-
-  def get_destination_label(self, max_length, include_locale = False, include_hostname = False):
-    """
-    Provides a short description of the destination. This is made up of two
-    components, the base <ip addr>:<port> and an extra piece of information in
-    parentheses. The IP address is scrubbed from private connections.
-
-    Extra information is...
-    - the port's purpose for exit connections
-    - the locale and/or hostname if set to do so, the address isn't private,
-      and isn't on the local network
-    - nothing otherwise
-
-    Arguments:
-      max_length       - maximum length of the string returned
-      include_locale   - possibly includes the locale
-      include_hostname - possibly includes the hostname
-    """
-
-    # the port and port derived data can be hidden by config or without include_port
-
-    include_port = self.include_port and (CONFIG["features.connection.showExitPort"] or self.get_type() != Category.EXIT)
-
-    # destination of the connection
-
-    address_label = "<scrubbed>" if self.is_private() else self.foreign.get_address()
-    port_label = ":%s" % self.foreign.get_port() if include_port else ""
-    destination_address = address_label + port_label
-
-    # Only append the extra info if there's at least a couple characters of
-    # space (this is what's needed for the country codes).
-
-    if len(destination_address) + 5 <= max_length:
-      space_available = max_length - len(destination_address) - 3
-
-      if self.get_type() == Category.EXIT and include_port:
-        purpose = connection.port_usage(self.foreign.get_port())
-
-        if purpose:
-          # BitTorrent is a common protocol to truncate, so just use "Torrent"
-          # if there's not enough room.
-
-          if len(purpose) > space_available and purpose == "BitTorrent":
-            purpose = "Torrent"
-
-          # crops with a hyphen if too long
-
-          purpose = uiTools.crop_str(purpose, space_available, end_type = uiTools.Ending.HYPHEN)
-
-          destination_address += " (%s)" % purpose
-      elif not connection.is_private_address(self.foreign.get_address()):
-        extra_info = []
-        conn = torTools.get_conn()
-
-        if include_locale and not conn.is_geoip_unavailable():
-          foreign_locale = self.foreign.get_locale("??")
-          extra_info.append(foreign_locale)
-          space_available -= len(foreign_locale) + 2
-
-        if include_hostname:
-          destination_hostname = self.foreign.get_hostname()
-
-          if destination_hostname:
-            # determines the full space available, taking into account the ", "
-            # dividers if there's multiple pieces of extra data
-
-            max_hostname_space = space_available - 2 * len(extra_info)
-            destination_hostname = uiTools.crop_str(destination_hostname, max_hostname_space)
-            extra_info.append(destination_hostname)
-            space_available -= len(destination_hostname)
-
-        if extra_info:
-          destination_address += " (%s)" % ", ".join(extra_info)
-
-    return destination_address[:max_length]
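The category that connEntry.py (now conn_entry.py) assigns in
ConnectionLine.__init__() boils down to comparing the connection's ports
against tor's configured ports. A standalone sketch with example port values;
ports are handled as strings, as arm does, and base_category() is an
illustrative name rather than an arm function:

def base_category(local_port, remote_port, or_port, dir_port, socks_port, control_port, hidden_service_ports):
  # inbound checks come first, then the local application ports
  if local_port in (or_port, dir_port):
    return "INBOUND"
  elif local_port == socks_port:
    return "SOCKS"
  elif remote_port in hidden_service_ports:
    return "HIDDEN"
  elif local_port == control_port:
    return "CONTROL"
  else:
    return "OUTBOUND"


print(base_category("9001", "50044", "9001", "9030", "9050", "9051", []))  # INBOUND
print(base_category("41822", "9001", "9001", "9030", "9050", "9051", []))  # OUTBOUND
print(base_category("9050", "443", "9001", "9030", "9050", "9051", []))    # SOCKS

In arm the OUTBOUND result is then refined by get_type() into EXIT, CIRCUIT,
or DIRECTORY based on consensus and circuit data, as shown above.
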
diff --git a/arm/connections/connPanel.py b/arm/connections/connPanel.py
deleted file mode 100644
index f0b5ebc..0000000
--- a/arm/connections/connPanel.py
+++ /dev/null
@@ -1,668 +0,0 @@
-"""
-Listing of the currently established connections tor has made.
-"""
-
-import re
-import time
-import curses
-import threading
-
-import arm.popups
-import arm.util.tracker
-
-from arm.connections import countPopup, descriptorPopup, entries, connEntry, circEntry
-from arm.util import panel, torTools, tracker, uiTools
-
-from stem.control import State
-from stem.util import conf, connection, enum
-
-# height of the detail panel content, not counting top and bottom border
-
-DETAILS_HEIGHT = 7
-
-# listing types
-
-Listing = enum.Enum(("IP_ADDRESS", "IP Address"), "HOSTNAME", "FINGERPRINT", "NICKNAME")
-
-
-def conf_handler(key, value):
-  if key == "features.connection.listing_type":
-    return conf.parse_enum(key, value, Listing)
-  elif key == "features.connection.refreshRate":
-    return max(1, value)
-  elif key == "features.connection.order":
-    return conf.parse_enum_csv(key, value[0], entries.SortAttr, 3)
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.connection.resolveApps": True,
-  "features.connection.listing_type": Listing.IP_ADDRESS,
-  "features.connection.order": [
-    entries.SortAttr.CATEGORY,
-    entries.SortAttr.LISTING,
-    entries.SortAttr.UPTIME],
-  "features.connection.refreshRate": 5,
-  "features.connection.showIps": True,
-}, conf_handler)
-
-
-class ConnectionPanel(panel.Panel, threading.Thread):
-  """
-  Listing of connections tor is making, with information correlated against
-  the current consensus and other data sources.
-  """
-
-  def __init__(self, stdscr):
-    panel.Panel.__init__(self, stdscr, "connections", 0)
-    threading.Thread.__init__(self)
-    self.setDaemon(True)
-
-    # defaults our listing selection to fingerprints if ip address
-    # displaying is disabled
-    #
-    # TODO: This is a little sucky in that it won't work if showIps changes
-    # while we're running (... but arm doesn't allow for that atm)
-
-    if not CONFIG["features.connection.showIps"] and CONFIG["features.connection.listing_type"] == 0:
-      arm_config = conf.get_config("arm")
-      arm_config.set("features.connection.listing_type", Listing.keys()[Listing.index_of(Listing.FINGERPRINT)])
-
-    self._scroller = uiTools.Scroller(True)
-    self._title = "Connections:"  # title line of the panel
-    self._entries = []            # last fetched display entries
-    self._entry_lines = []        # individual lines rendered from the entries listing
-    self._show_details = False    # presents the details panel if true
-
-    self._last_update = -1        # time the content was last revised
-    self._is_tor_running = True   # indicates if tor is currently running or not
-    self._halt_time = None        # time when tor was stopped
-    self._halt = False            # terminates thread if true
-    self._cond = threading.Condition()  # used for pausing the thread
-    self.vals_lock = threading.RLock()
-
-    # Tracks exiting port and client country statistics
-
-    self._client_locale_usage = {}
-    self._exit_port_usage = {}
-
-    # If we're a bridge and been running over a day then prepopulates with the
-    # last day's clients.
-
-    conn = torTools.get_conn()
-    bridge_clients = conn.get_info("status/clients-seen", None)
-
-    if bridge_clients:
-      # Response has a couple arguments...
-      # TimeStarted="2011-08-17 15:50:49" CountrySummary=us=16,de=8,uk=8
-
-      country_summary = None
-
-      for arg in bridge_clients.split():
-        if arg.startswith("CountrySummary="):
-          country_summary = arg[15:]
-          break
-
-      if country_summary:
-        for entry in country_summary.split(","):
-          if re.match("^..=[0-9]+$", entry):
-            locale, count = entry.split("=", 1)
-            self._client_locale_usage[locale] = int(count)
-
-    # Last sampling received from the ConnectionResolver, used to detect when
-    # it changes.
-
-    self._last_resource_fetch = -1
-
-    # resolver for the command/pid associated with SOCKS, HIDDEN, and CONTROL connections
-
-    self._app_resolver = tracker.get_port_usage_tracker()
-
-    # rate limits appResolver queries to once per update
-
-    self.app_resolve_since_update = False
-
-    # mark the initially existing connection uptimes as being estimates
-
-    for entry in self._entries:
-      if isinstance(entry, connEntry.ConnectionEntry):
-        entry.getLines()[0].is_initial_connection = True
-
-    # listens for when tor stops so we know to stop reflecting changes
-
-    conn.add_status_listener(self.tor_state_listener)
-
-  def tor_state_listener(self, controller, event_type, _):
-    """
-    Freezes the connection contents when Tor stops.
-    """
-
-    self._is_tor_running = event_type in (State.INIT, State.RESET)
-
-    if self._is_tor_running:
-      self._halt_time = None
-    else:
-      self._halt_time = time.time()
-
-    self.redraw(True)
-
-  def get_pause_time(self):
-    """
-    Provides the time Tor stopped if it isn't running. Otherwise this is the
-    time we were last paused.
-    """
-
-    if self._halt_time:
-      return self._halt_time
-    else:
-      return panel.Panel.get_pause_time(self)
-
-  def set_sort_order(self, ordering = None):
-    """
-    Sets the connection attributes we're sorting by and resorts the contents.
-
-    Arguments:
-      ordering - new ordering, if undefined then this resorts with the last
-                 set ordering
-    """
-
-    self.vals_lock.acquire()
-
-    if ordering:
-      arm_config = conf.get_config("arm")
-
-      ordering_keys = [entries.SortAttr.keys()[entries.SortAttr.index_of(v)] for v in ordering]
-      arm_config.set("features.connection.order", ", ".join(ordering_keys))
-
-    self._entries.sort(key = lambda i: (i.get_sort_values(CONFIG["features.connection.order"], self.get_listing_type())))
-
-    self._entry_lines = []
-
-    for entry in self._entries:
-      self._entry_lines += entry.getLines()
-
-    self.vals_lock.release()
-
-  def get_listing_type(self):
-    """
-    Provides the priority content we list connections by.
-    """
-
-    return CONFIG["features.connection.listing_type"]
-
-  def set_listing_type(self, listing_type):
-    """
-    Sets the priority information presented by the panel.
-
-    Arguments:
-      listing_type - Listing instance for the primary information to be shown
-    """
-
-    if self.get_listing_type() == listing_type:
-      return
-
-    self.vals_lock.acquire()
-
-    arm_config = conf.get_config("arm")
-    arm_config.set("features.connection.listing_type", Listing.keys()[Listing.index_of(listing_type)])
-
-    # if we're sorting by the listing then we need to resort
-
-    if entries.SortAttr.LISTING in CONFIG["features.connection.order"]:
-      self.set_sort_order()
-
-    self.vals_lock.release()
-
-  def is_clients_allowed(self):
-    """
-    True if client connections are permissible, false otherwise.
-    """
-
-    conn = torTools.get_conn()
-    return "Guard" in conn.get_my_flags([]) or conn.get_option("BridgeRelay", None) == "1"
-
-  def is_exits_allowed(self):
-    """
-    True if exit connections are permissible, false otherwise.
-    """
-
-    if not torTools.get_conn().get_option("ORPort", None):
-      return False  # no ORPort
-
-    policy = torTools.get_conn().get_exit_policy()
-
-    return policy and policy.is_exiting_allowed()
-
-  def show_sort_dialog(self):
-    """
-    Provides the sort dialog for our connections.
-    """
-
-    # set ordering for connection options
-
-    title_label = "Connection Ordering:"
-    options = list(entries.SortAttr)
-    old_selection = CONFIG["features.connection.order"]
-    option_colors = dict([(attr, entries.SORT_COLORS[attr]) for attr in options])
-    results = arm.popups.show_sort_dialog(title_label, options, old_selection, option_colors)
-
-    if results:
-      self.set_sort_order(results)
-
-  def handle_key(self, key):
-    self.vals_lock.acquire()
-
-    is_keystroke_consumed = True
-
-    if uiTools.is_scroll_key(key):
-      page_height = self.get_preferred_size()[0] - 1
-
-      if self._show_details:
-        page_height -= (DETAILS_HEIGHT + 1)
-
-      is_changed = self._scroller.handle_key(key, self._entry_lines, page_height)
-
-      if is_changed:
-        self.redraw(True)
-    elif uiTools.is_selection_key(key):
-      self._show_details = not self._show_details
-      self.redraw(True)
-    elif key == ord('s') or key == ord('S'):
-      self.show_sort_dialog()
-    elif key == ord('u') or key == ord('U'):
-      # provides a menu to pick the connection resolver
-
-      title = "Resolver Util:"
-      options = ["auto"] + list(connection.Resolver)
-      conn_resolver = arm.util.tracker.get_connection_tracker()
-
-      current_overwrite = conn_resolver.get_custom_resolver()
-
-      if current_overwrite is None:
-        old_selection = 0
-      else:
-        old_selection = options.index(current_overwrite)
-
-      selection = arm.popups.show_menu(title, options, old_selection)
-
-      # applies new setting
-
-      if selection != -1:
-        selected_option = options[selection] if selection != 0 else None
-        conn_resolver.set_custom_resolver(selected_option)
-    elif key == ord('l') or key == ord('L'):
-      # provides a menu to pick the primary information we list connections by
-
-      title = "List By:"
-      options = list(entries.ListingType)
-
-      # dropping the HOSTNAME listing type until we support displaying that content
-
-      options.remove(arm.connections.entries.ListingType.HOSTNAME)
-
-      old_selection = options.index(self.get_listing_type())
-      selection = arm.popups.show_menu(title, options, old_selection)
-
-      # applies new setting
-
-      if selection != -1:
-        self.set_listing_type(options[selection])
-    elif key == ord('d') or key == ord('D'):
-      # presents popup for raw consensus data
-      descriptorPopup.show_descriptor_popup(self)
-    elif (key == ord('c') or key == ord('C')) and self.is_clients_allowed():
-      countPopup.showCountDialog(countPopup.CountType.CLIENT_LOCALE, self._client_locale_usage)
-    elif (key == ord('e') or key == ord('E')) and self.is_exits_allowed():
-      countPopup.showCountDialog(countPopup.CountType.EXIT_PORT, self._exit_port_usage)
-    else:
-      is_keystroke_consumed = False
-
-    self.vals_lock.release()
-    return is_keystroke_consumed
-
-  def run(self):
-    """
-    Keeps connections listing updated, checking for new entries at a set rate.
-    """
-
-    last_draw = time.time() - 1
-
-    # Fetches our initial connection results. The wait is so this doesn't
-    # run during arm's interface initialization (otherwise there's a
-    # noticeable pause before the first redraw).
-
-    self._cond.acquire()
-    self._cond.wait(0.2)
-    self._cond.release()
-    self._update()             # populates initial entries
-    self._resolve_apps(False)  # resolves initial applications
-
-    while not self._halt:
-      current_time = time.time()
-
-      if self.is_paused() or not self._is_tor_running or current_time - last_draw < CONFIG["features.connection.refreshRate"]:
-        self._cond.acquire()
-
-        if not self._halt:
-          self._cond.wait(0.2)
-
-        self._cond.release()
-      else:
-        # updates content if there are new results, otherwise just redraws
-
-        self._update()
-        self.redraw(True)
-
-        # we may have missed multiple updates due to being paused, showing
-        # another panel, etc so last_draw might need to jump multiple ticks
-
-        draw_ticks = (time.time() - last_draw) / CONFIG["features.connection.refreshRate"]
-        last_draw += CONFIG["features.connection.refreshRate"] * draw_ticks
-
-  def get_help(self):
-    resolver_util = arm.util.tracker.get_connection_tracker().get_custom_resolver()
-
-    if resolver_util is None:
-      resolver_util = "auto"
-
-    options = []
-    options.append(("up arrow", "scroll up a line", None))
-    options.append(("down arrow", "scroll down a line", None))
-    options.append(("page up", "scroll up a page", None))
-    options.append(("page down", "scroll down a page", None))
-    options.append(("enter", "show connection details", None))
-    options.append(("d", "raw consensus descriptor", None))
-
-    if self.is_clients_allowed():
-      options.append(("c", "client locale usage summary", None))
-
-    if self.is_exits_allowed():
-      options.append(("e", "exit port usage summary", None))
-
-    options.append(("l", "listed identity", self.get_listing_type().lower()))
-    options.append(("s", "sort ordering", None))
-    options.append(("u", "resolving utility", resolver_util))
-    return options
-
-  def get_selection(self):
-    """
-    Provides the currently selected connection entry.
-    """
-
-    return self._scroller.get_cursor_selection(self._entry_lines)
-
-  def draw(self, width, height):
-    self.vals_lock.acquire()
-
-    # if we don't have any contents then refuse to show details
-
-    if not self._entries:
-      self._show_details = False
-
-    # extra line when showing the detail panel is for the bottom border
-
-    detail_panel_offset = DETAILS_HEIGHT + 1 if self._show_details else 0
-    is_scrollbar_visible = len(self._entry_lines) > height - detail_panel_offset - 1
-
-    scroll_location = self._scroller.get_scroll_location(self._entry_lines, height - detail_panel_offset - 1)
-    cursor_selection = self.get_selection()
-
-    # draws the detail panel if currently displaying it
-
-    if self._show_details and cursor_selection:
-      # This is a solid border unless the scrollbar is visible, in which case a
-      # 'T' pipe connects the border to the bar.
-
-      uiTools.draw_box(self, 0, 0, width, DETAILS_HEIGHT + 2)
-
-      if is_scrollbar_visible:
-        self.addch(DETAILS_HEIGHT + 1, 1, curses.ACS_TTEE)
-
-      draw_entries = cursor_selection.get_details(width)
-
-      for i in range(min(len(draw_entries), DETAILS_HEIGHT)):
-        self.addstr(1 + i, 2, draw_entries[i][0], draw_entries[i][1])
-
-    # title label with connection counts
-
-    if self.is_title_visible():
-      title = "Connection Details:" if self._show_details else self._title
-      self.addstr(0, 0, title, curses.A_STANDOUT)
-
-    scroll_offset = 0
-
-    if is_scrollbar_visible:
-      scroll_offset = 2
-      self.add_scroll_bar(scroll_location, scroll_location + height - detail_panel_offset - 1, len(self._entry_lines), 1 + detail_panel_offset)
-
-    if self.is_paused() or not self._is_tor_running:
-      current_time = self.get_pause_time()
-    else:
-      current_time = time.time()
-
-    for line_number in range(scroll_location, len(self._entry_lines)):
-      entry_line = self._entry_lines[line_number]
-
-      # if this is an unresolved SOCKS, HIDDEN, or CONTROL entry then queue up
-      # resolution for the applications they belong to
-
-      if isinstance(entry_line, connEntry.ConnectionLine) and entry_line.is_unresolved_application():
-        self._resolve_apps()
-
-      # highlighting if this is the selected line
-
-      extra_format = curses.A_STANDOUT if entry_line == cursor_selection else curses.A_NORMAL
-
-      draw_line = line_number + detail_panel_offset + 1 - scroll_location
-
-      prefix = entry_line.get_listing_prefix()
-
-      for i in range(len(prefix)):
-        self.addch(draw_line, scroll_offset + i, prefix[i])
-
-      x_offset = scroll_offset + len(prefix)
-      draw_entry = entry_line.get_listing_entry(width - scroll_offset - len(prefix), current_time, self.get_listing_type())
-
-      for msg, attr in draw_entry:
-        attr |= extra_format
-        self.addstr(draw_line, x_offset, msg, attr)
-        x_offset += len(msg)
-
-      if draw_line >= height:
-        break
-
-    self.vals_lock.release()
-
-  def stop(self):
-    """
-    Halts further resolutions and terminates the thread.
-    """
-
-    self._cond.acquire()
-    self._halt = True
-    self._cond.notifyAll()
-    self._cond.release()
-
-  def _update(self):
-    """
-    Fetches the newest resolved connections.
-    """
-
-    self.app_resolve_since_update = False
-
-    # if we don't have an initialized resolver then this is a no-op
-
-    if not arm.util.tracker.get_connection_tracker().is_alive():
-      return
-
-    conn_resolver = arm.util.tracker.get_connection_tracker()
-    current_resolution_count = conn_resolver.run_counter()
-
-    self.vals_lock.acquire()
-
-    new_entries = []  # the new results we'll display
-
-    # Fetches new connections and client circuits...
-    # new_connections  [(local ip, local port, foreign ip, foreign port)...]
-    # new_circuits     {circuit_id => (status, purpose, path)...}
-
-    new_connections = [(conn.local_address, conn.local_port, conn.remote_address, conn.remote_port) for conn in conn_resolver.get_connections()]
-    new_circuits = {}
-
-    for circuit_id, status, purpose, path in torTools.get_conn().get_circuits():
-      # Skips established single-hop circuits (these are for directory
-      # fetches, not client circuits)
-
-      if not (status == "BUILT" and len(path) == 1):
-        new_circuits[circuit_id] = (status, purpose, path)
-
-    # Populates new_entries with any of our old entries that still exist.
-    # This is both for performance and to keep from resetting the uptime
-    # attributes. Note that CircEntries are a ConnectionEntry subclass so
-    # we need to check for them first.
-
-    for old_entry in self._entries:
-      if isinstance(old_entry, circEntry.CircEntry):
-        new_entry = new_circuits.get(old_entry.circuit_id)
-
-        if new_entry:
-          old_entry.update(new_entry[0], new_entry[2])
-          new_entries.append(old_entry)
-          del new_circuits[old_entry.circuit_id]
-      elif isinstance(old_entry, connEntry.ConnectionEntry):
-        connection_line = old_entry.getLines()[0]
-        conn_attr = (connection_line.local.get_address(), connection_line.local.get_port(),
-                     connection_line.foreign.get_address(), connection_line.foreign.get_port())
-
-        if conn_attr in new_connections:
-          new_entries.append(old_entry)
-          new_connections.remove(conn_attr)
-
-    # Reset any display attributes for the entries we're keeping
-
-    for entry in new_entries:
-      entry.reset_display()
-
-    # Adds any new connection and circuit entries.
-
-    for local_address, local_port, remote_address, remote_port in new_connections:
-      new_conn_entry = connEntry.ConnectionEntry(local_address, local_port, remote_address, remote_port)
-      new_conn_line = new_conn_entry.getLines()[0]
-
-      if new_conn_line.get_type() != connEntry.Category.CIRCUIT:
-        new_entries.append(new_conn_entry)
-
-        # updates exit port and client locale usage information
-        if new_conn_line.is_private():
-          if new_conn_line.get_type() == connEntry.Category.INBOUND:
-            # client connection, update locale information
-
-            client_locale = new_conn_line.foreign.get_locale()
-
-            if client_locale:
-              self._client_locale_usage[client_locale] = self._client_locale_usage.get(client_locale, 0) + 1
-          elif new_conn_line.get_type() == connEntry.Category.EXIT:
-            exit_port = new_conn_line.foreign.get_port()
-            self._exit_port_usage[exit_port] = self._exit_port_usage.get(exit_port, 0) + 1
-
-    for circuit_id in new_circuits:
-      status, purpose, path = new_circuits[circuit_id]
-      new_entries.append(circEntry.CircEntry(circuit_id, status, purpose, path))
-
-    # Counts the relays in each of the categories. This also flushes the
-    # type cache for all of the connections (in case it's changed since it
-    # was last fetched).
-
-    category_types = list(connEntry.Category)
-    type_counts = dict((type, 0) for type in category_types)
-
-    for entry in new_entries:
-      if isinstance(entry, connEntry.ConnectionEntry):
-        type_counts[entry.getLines()[0].get_type()] += 1
-      elif isinstance(entry, circEntry.CircEntry):
-        type_counts[connEntry.Category.CIRCUIT] += 1
-
-    # makes labels for all the categories with connections (ie,
-    # "21 outbound", "1 control", etc)
-
-    count_labels = []
-
-    for category in category_types:
-      if type_counts[category] > 0:
-        count_labels.append("%i %s" % (type_counts[category], category.lower()))
-
-    if count_labels:
-      self._title = "Connections (%s):" % ", ".join(count_labels)
-    else:
-      self._title = "Connections:"
-
-    self._entries = new_entries
-
-    self._entry_lines = []
-
-    for entry in self._entries:
-      self._entry_lines += entry.getLines()
-
-    self.set_sort_order()
-    self._last_resource_fetch = current_resolution_count
-    self.vals_lock.release()
-
-  def _resolve_apps(self, flag_query = True):
-    """
-    Triggers an asynchronous query for all unresolved SOCKS, HIDDEN, and
-    CONTROL entries.
-
-    Arguments:
-      flag_query - sets a flag to prevent further calls from being honored
-                   until the next update if true
-    """
-
-    if self.app_resolve_since_update or not CONFIG["features.connection.resolveApps"]:
-      return
-
-    unresolved_lines = [l for l in self._entry_lines if isinstance(l, connEntry.ConnectionLine) and l.is_unresolved_application()]
-
-    # get the ports used for unresolved applications
-
-    app_ports = []
-
-    for line in unresolved_lines:
-      app_conn = line.local if line.get_type() == connEntry.Category.HIDDEN else line.foreign
-      app_ports.append(app_conn.get_port())
-
-    # Queue up resolution for the unresolved ports (skips if it's still working
-    # on the last query).
-
-    if app_ports and not self._app_resolver.is_alive():
-      self._app_resolver.get_processes_using_ports(app_ports)
-
-    # Fetches results. If the query finishes quickly then this is what we just
-    # asked for, otherwise these belong to an earlier resolution.
-    #
-    # The application resolver might have given up querying (for instance, if
-    # the lsof lookups aren't working on this platform or lacks permissions).
-    # The is_application_resolving flag lets the unresolved entries indicate if there's
-    # a lookup in progress for them or not.
-
-    time.sleep(0.2)  # TODO: previous resolver only blocked while awaiting a lookup
-    app_results = self._app_resolver.get_processes_using_ports(app_ports)
-
-    for line in unresolved_lines:
-      is_local = line.get_type() == connEntry.Category.HIDDEN
-      line_port = line.local.get_port() if is_local else line.foreign.get_port()
-
-      if line_port in app_results:
-        # sets application attributes if there's a result with this as the
-        # inbound port
-
-        for inbound_port, outbound_port, cmd, pid in app_results[line_port]:
-          app_port = outbound_port if is_local else inbound_port
-
-          if line_port == app_port:
-            line.application_name = cmd
-            line.application_pid = pid
-            line.is_application_resolving = False
-      else:
-        line.is_application_resolving = self._app_resolver.is_alive
-
-    if flag_query:
-      self.app_resolve_since_update = True
diff --git a/arm/connections/conn_entry.py b/arm/connections/conn_entry.py
new file mode 100644
index 0000000..a2bf55c
--- /dev/null
+++ b/arm/connections/conn_entry.py
@@ -0,0 +1,957 @@
+"""
+Connection panel entries related to actual connections to or from the system
+(ie, results seen by netstat, lsof, etc).
+"""
+
+import time
+import curses
+
+from arm.util import tor_tools, ui_tools
+from arm.connections import entries
+
+from stem.util import conf, connection, enum, str_tools
+
+# Connection Categories:
+#   Inbound      Relay connection, coming to us.
+#   Outbound     Relay connection, leaving us.
+#   Exit         Outbound relay connection leaving the Tor network.
+#   Hidden       Connections to a hidden service we're providing.
+#   Socks        Socks connections for applications using Tor.
+#   Circuit      Circuits our tor client has created.
+#   Directory    Fetching tor consensus information.
+#   Control      Tor controller (arm, vidalia, etc).
+
+Category = enum.Enum("INBOUND", "OUTBOUND", "EXIT", "HIDDEN", "SOCKS", "CIRCUIT", "DIRECTORY", "CONTROL")
+
+CATEGORY_COLOR = {
+  Category.INBOUND: "green",
+  Category.OUTBOUND: "blue",
+  Category.EXIT: "red",
+  Category.HIDDEN: "magenta",
+  Category.SOCKS: "yellow",
+  Category.CIRCUIT: "cyan",
+  Category.DIRECTORY: "magenta",
+  Category.CONTROL: "red",
+}
+
+# static data for listing format
+# <src>  -->  <dst>  <etc><padding>
+
+LABEL_FORMAT = "%s  -->  %s  %s%s"
+LABEL_MIN_PADDING = 2  # min space between listing label and following data
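+
+# Illustrative only (not from the module): interpolating LABEL_FORMAT with a
+# source, destination, etc field, and padding produces a listing line like...
+#
+#   LABEL_FORMAT % ("127.0.0.1:9051", "86.59.21.38:443 (de)", "tor26", "  ")
+#   => '127.0.0.1:9051  -->  86.59.21.38:443 (de)  tor26  '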
+
+# sort value for scrubbed ip addresses
+
+SCRUBBED_IP_VAL = 255 ** 4
+
+CONFIG = conf.config_dict("arm", {
+  "features.connection.markInitialConnections": True,
+  "features.connection.showIps": True,
+  "features.connection.showExitPort": True,
+  "features.connection.showColumn.fingerprint": True,
+  "features.connection.showColumn.nickname": True,
+  "features.connection.showColumn.destination": True,
+  "features.connection.showColumn.expandedIp": True,
+})
+
+
+class Endpoint:
+  """
+  Collection of attributes associated with a connection endpoint. This is a
+  thin wrapper around tor_tools functions, making use of its caching for
+  performance.
+  """
+
+  def __init__(self, address, port):
+    self.address = address
+    self.port = port
+
+    # if true, we treat the port as definitely not being an ORPort when
+    # searching for matching fingerprints (otherwise we use it to possibly
+    # narrow results when unknown)
+
+    self.is_not_or_port = True
+
+    # if set then this overwrites fingerprint lookups
+
+    self.fingerprint_overwrite = None
+
+  def get_address(self):
+    """
+    Provides the IP address of the endpoint.
+    """
+
+    return self.address
+
+  def get_port(self):
+    """
+    Provides the port of the endpoint.
+    """
+
+    return self.port
+
+  def get_hostname(self, default = None):
+    """
+    Provides the hostname associated with the relay's address. This is a
+    non-blocking call and returns None if the address either can't be resolved
+    or hasn't been resolved yet.
+
+    Arguments:
+      default - return value if no hostname is available
+    """
+
+    # TODO: skipping all hostname resolution to be safe for now
+    #try:
+    #  myHostname = hostnames.resolve(self.address)
+    #except:
+    #  # either a ValueError or IOError depending on the source of the lookup failure
+    #  myHostname = None
+    #
+    #if not myHostname: return default
+    #else: return myHostname
+
+    return default
+
+  def get_locale(self, default=None):
+    """
+    Provides the two letter country code for the IP address' locale.
+
+    Arguments:
+      default - return value if no locale information is available
+    """
+
+    conn = tor_tools.get_conn()
+    return conn.get_info("ip-to-country/%s" % self.address, default)
+
+  def get_fingerprint(self):
+    """
+    Provides the fingerprint of the relay, returning "UNKNOWN" if it can't be
+    determined.
+    """
+
+    if self.fingerprint_overwrite:
+      return self.fingerprint_overwrite
+
+    conn = tor_tools.get_conn()
+    my_fingerprint = conn.get_relay_fingerprint(self.address)
+
+    # If there were multiple matches and our port is likely the ORPort then
+    # try again with that to narrow the results.
+
+    if not my_fingerprint and not self.is_not_or_port:
+      my_fingerprint = conn.get_relay_fingerprint(self.address, self.port)
+
+    if my_fingerprint:
+      return my_fingerprint
+    else:
+      return "UNKNOWN"
+
+  def get_nickname(self):
+    """
+    Provides the nickname of the relay, returning "UNKNOWN" if it can't be
+    determined.
+    """
+
+    my_fingerprint = self.get_fingerprint()
+
+    if my_fingerprint != "UNKNOWN":
+      conn = tor_tools.get_conn()
+      my_nickname = conn.get_relay_nickname(my_fingerprint)
+
+      if my_nickname:
+        return my_nickname
+      else:
+        return "UNKNOWN"
+    else:
+      return "UNKNOWN"
+
+
+class ConnectionEntry(entries.ConnectionPanelEntry):
+  """
+  Represents a connection being made to or from this system. These only
+  concern real connections so it includes the inbound, outbound, directory,
+  application, and controller categories.
+  """
+
+  def __init__(self, local_address, local_port, remote_address, remote_port):
+    entries.ConnectionPanelEntry.__init__(self)
+    self.lines = [ConnectionLine(local_address, local_port, remote_address, remote_port)]
+
+  def get_sort_value(self, attr, listing_type):
+    """
+    Provides the value of a single attribute used for sorting purposes.
+    """
+
+    connection_line = self.lines[0]
+
+    if attr == entries.SortAttr.IP_ADDRESS:
+      if connection_line.is_private():
+        return SCRUBBED_IP_VAL  # orders at the end
+
+      return connection_line.sort_address
+    elif attr == entries.SortAttr.PORT:
+      return connection_line.sort_port
+    elif attr == entries.SortAttr.HOSTNAME:
+      if connection_line.is_private():
+        return ""
+
+      return connection_line.foreign.get_hostname("")
+    elif attr == entries.SortAttr.FINGERPRINT:
+      return connection_line.foreign.get_fingerprint()
+    elif attr == entries.SortAttr.NICKNAME:
+      my_nickname = connection_line.foreign.get_nickname()
+
+      if my_nickname == "UNKNOWN":
+        return "z" * 20  # orders at the end
+      else:
+        return my_nickname.lower()
+    elif attr == entries.SortAttr.CATEGORY:
+      return Category.index_of(connection_line.get_type())
+    elif attr == entries.SortAttr.UPTIME:
+      return connection_line.start_time
+    elif attr == entries.SortAttr.COUNTRY:
+      if connection.is_private_address(self.lines[0].foreign.get_address()):
+        return ""
+      else:
+        return connection_line.foreign.get_locale("")
+    else:
+      return entries.ConnectionPanelEntry.get_sort_value(self, attr, listing_type)
+
+
+class ConnectionLine(entries.ConnectionPanelLine):
+  """
+  Display component of the ConnectionEntry.
+  """
+
+  def __init__(self, local_address, local_port, remote_address, remote_port, include_port=True, include_expanded_addresses=True):
+    entries.ConnectionPanelLine.__init__(self)
+
+    self.local = Endpoint(local_address, local_port)
+    self.foreign = Endpoint(remote_address, remote_port)
+    self.start_time = time.time()
+    self.is_initial_connection = False
+
+    # overwrite the local fingerprint with ours
+
+    conn = tor_tools.get_conn()
+    self.local.fingerprint_overwrite = conn.get_info("fingerprint", None)
+
+    # True if the connection has matched the properties of a client/directory
+    # connection every time we've checked. The criteria we check are...
+    #   client    - first hop in an established circuit
+    #   directory - matches an established single-hop circuit (probably a
+    #               directory mirror)
+
+    self._possible_client = True
+    self._possible_directory = True
+
+    # attributes for SOCKS, HIDDEN, and CONTROL connections
+
+    self.application_name = None
+    self.application_pid = None
+    self.is_application_resolving = False
+
+    my_or_port = conn.get_option("ORPort", None)
+    my_dir_port = conn.get_option("DirPort", None)
+    my_socks_port = conn.get_option("SocksPort", "9050")
+    my_ctl_port = conn.get_option("ControlPort", None)
+    my_hidden_service_ports = conn.get_hidden_service_ports()
+
+    # the ORListenAddress can overwrite the ORPort
+
+    listen_addr = conn.get_option("ORListenAddress", None)
+
+    if listen_addr and ":" in listen_addr:
+      my_or_port = listen_addr[listen_addr.find(":") + 1:]
+
+    if local_port in (my_or_port, my_dir_port):
+      self.base_type = Category.INBOUND
+      self.local.is_not_or_port = False
+    elif local_port == my_socks_port:
+      self.base_type = Category.SOCKS
+    elif remote_port in my_hidden_service_ports:
+      self.base_type = Category.HIDDEN
+    elif local_port == my_ctl_port:
+      self.base_type = Category.CONTROL
+    else:
+      self.base_type = Category.OUTBOUND
+      self.foreign.is_not_or_port = False
+
+    self.cached_type = None
+
+    # includes the port or expanded ip address field when displaying listing
+    # information if true
+
+    self.include_port = include_port
+    self.include_expanded_addresses = include_expanded_addresses
+
+    # cached immutable values used for sorting
+
+    ip_value = 0
+
+    for comp in self.foreign.get_address().split("."):
+      ip_value *= 255
+      ip_value += int(comp)
+
+    self.sort_address = ip_value
+    self.sort_port = int(self.foreign.get_port())
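+
+    # Illustrative only (not from the module): under this base-255 scheme
+    # "1.2.3.4" maps to ((1 * 255 + 2) * 255 + 3) * 255 + 4 = 16712194, so
+    # comparing sort_address values roughly follows numeric address order.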
+
+  def get_listing_entry(self, width, current_time, listing_type):
+    """
+    Provides the tuple list for this connection's listing. Lines are composed
+    of the following components:
+      <src>  -->  <dst>     <etc>     <uptime> (<type>)
+
+    ListingType.IP_ADDRESS:
+      src - <internal addr:port> --> <external addr:port>
+      dst - <destination addr:port>
+      etc - <fingerprint> <nickname>
+
+    ListingType.HOSTNAME:
+      src - localhost:<port>
+      dst - <destination hostname:port>
+      etc - <destination addr:port> <fingerprint> <nickname>
+
+    ListingType.FINGERPRINT:
+      src - localhost
+      dst - <destination fingerprint>
+      etc - <nickname> <destination addr:port>
+
+    ListingType.NICKNAME:
+      src - <source nickname>
+      dst - <destination nickname>
+      etc - <fingerprint> <destination addr:port>
+
+    Arguments:
+      width       - maximum length of the line
+      current_time - unix timestamp for what the results should consider to be
+                    the current time
+      listing_type - primary attribute we're listing connections by
+    """
+
+    # fetch our (most likely cached) display entry for the listing
+
+    my_listing = entries.ConnectionPanelLine.get_listing_entry(self, width, current_time, listing_type)
+
+    # fill in the current uptime and return the results
+
+    if CONFIG["features.connection.markInitialConnections"]:
+      time_prefix = "+" if self.is_initial_connection else " "
+    else:
+      time_prefix = ""
+
+    time_label = time_prefix + "%5s" % str_tools.get_time_label(current_time - self.start_time, 1)
+    my_listing[2] = (time_label, my_listing[2][1])
+
+    return my_listing
+
+  def is_unresolved_application(self):
+    """
+    True if our display uses application information that hasn't yet been resolved.
+    """
+
+    return self.application_name is None and self.get_type() in (Category.SOCKS, Category.HIDDEN, Category.CONTROL)
+
+  def _get_listing_entry(self, width, current_time, listing_type):
+    entry_type = self.get_type()
+
+    # Lines are split into the following components in reverse:
+    # init gap - " "
+    # content  - "<src>  -->  <dst>     <etc>     "
+    # time     - "<uptime>"
+    # preType  - " ("
+    # category - "<type>"
+    # postType - ")   "
+
+    line_format = ui_tools.get_color(CATEGORY_COLOR[entry_type])
+    time_width = 6 if CONFIG["features.connection.markInitialConnections"] else 5
+
+    draw_entry = [(" ", line_format),
+                  (self._get_listing_content(width - (12 + time_width) - 1, listing_type), line_format),
+                  (" " * time_width, line_format),
+                  (" (", line_format),
+                  (entry_type.upper(), line_format | curses.A_BOLD),
+                  (")" + " " * (9 - len(entry_type)), line_format)]
+
+    return draw_entry
+
+  def _get_details(self, width):
+    """
+    Provides details on the connection, correlated against available consensus
+    data.
+
+    Arguments:
+      width - available space to display in
+    """
+
+    detail_format = curses.A_BOLD | ui_tools.get_color(CATEGORY_COLOR[self.get_type()])
+    return [(line, detail_format) for line in self._get_detail_content(width)]
+
+  def reset_display(self):
+    entries.ConnectionPanelLine.reset_display(self)
+    self.cached_type = None
+
+  def is_private(self):
+    """
+    Returns true if the endpoint is private, possibly belonging to a client
+    connection or exit traffic.
+    """
+
+    if not CONFIG["features.connection.showIps"]:
+      return True
+
+    # This is used to scrub private information from the interface. Relaying
+    # etiquette (and wiretapping laws) say these are bad things to look at so
+    # DON'T CHANGE THIS UNLESS YOU HAVE A DAMN GOOD REASON!
+
+    my_type = self.get_type()
+
+    if my_type == Category.INBOUND:
+      # if we're a guard or bridge and the connection doesn't belong to a
+      # known relay then it might be client traffic
+
+      conn = tor_tools.get_conn()
+
+      if "Guard" in conn.get_my_flags([]) or conn.get_option("BridgeRelay", None) == "1":
+        all_matches = conn.get_relay_fingerprint(self.foreign.get_address(), get_all_matches = True)
+
+        return all_matches == []
+    elif my_type == Category.EXIT:
+      # DNS connections exiting us aren't private (since they're hitting our
+      # resolvers). Everything else, however, is.
+
+      # TODO: Ideally this would also double check that it's a UDP connection
+      # (since DNS is the only UDP traffic Tor will relay), however this
+      # will take a bit more work to propagate the information up from the
+      # connection resolver.
+
+      return self.foreign.get_port() != "53"
+
+    # for everything else this isn't a concern
+
+    return False
+
+  def get_type(self):
+    """
+    Provides our best guess at the current type of the connection. This
+    depends on consensus results, our current client circuits, etc. Results
+    are cached until this entry's display is reset.
+    """
+
+    # caches both to simplify the calls and to keep the type consistent until
+    # we want to reflect changes
+
+    if not self.cached_type:
+      if self.base_type == Category.OUTBOUND:
+        # Currently the only non-static categories are OUTBOUND vs...
+        # - EXIT since this depends on the current consensus
+        # - CIRCUIT if this is likely to belong to our guard usage
+        # - DIRECTORY if this is a single-hop circuit (directory mirror?)
+        #
+        # The exitability, circuits, and fingerprints are all cached by the
+        # tor_tools util keeping this a quick lookup.
+
+        conn = tor_tools.get_conn()
+        destination_fingerprint = self.foreign.get_fingerprint()
+
+        if destination_fingerprint == "UNKNOWN":
+          # Not a known relay. This might be an exit connection.
+
+          if conn.is_exiting_allowed(self.foreign.get_address(), self.foreign.get_port()):
+            self.cached_type = Category.EXIT
+        elif self._possible_client or self._possible_directory:
+          # This belongs to a known relay. If we haven't eliminated ourselves as
+          # a possible client or directory connection then check if it still
+          # holds true.
+
+          my_circuits = conn.get_circuits()
+
+          if self._possible_client:
+            # Checks that this belongs to the first hop in a circuit that's
+            # either unestablished or longer than a single hop (ie, anything but
+            # a built 1-hop connection since those are most likely a directory
+            # mirror).
+
+            for _, status, _, path in my_circuits:
+              if path and path[0] == destination_fingerprint and (status != "BUILT" or len(path) > 1):
+                self.cached_type = Category.CIRCUIT  # matched a probable guard connection
+
+            # if we fell through, we can eliminate ourselves as a guard in the future
+            if not self.cached_type:
+              self._possible_client = False
+
+          if self._possible_directory:
+            # Checks if we match a built, single hop circuit.
+
+            for _, status, _, path in my_circuits:
+              if path and path[0] == destination_fingerprint and status == "BUILT" and len(path) == 1:
+                self.cached_type = Category.DIRECTORY
+
+            # if we fell through, eliminate ourselves as a directory connection
+            if not self.cached_type:
+              self._possible_directory = False
+
+      if not self.cached_type:
+        self.cached_type = self.base_type
+
+    return self.cached_type
+
+  def get_etc_content(self, width, listing_type):
+    """
+    Provides the optional content for the connection.
+
+    Arguments:
+      width       - maximum length of the line
+      listing_type - primary attribute we're listing connections by
+    """
+
+    # for applications show the command/pid
+
+    if self.get_type() in (Category.SOCKS, Category.HIDDEN, Category.CONTROL):
+      display_label = ""
+
+      if self.application_name:
+        if self.application_pid:
+          display_label = "%s (%s)" % (self.application_name, self.application_pid)
+        else:
+          display_label = self.application_name
+      elif self.is_application_resolving:
+        display_label = "resolving..."
+      else:
+        display_label = "UNKNOWN"
+
+      if len(display_label) < width:
+        return ("%%-%is" % width) % display_label
+      else:
+        return ""
+
+    # for everything else display connection/consensus information
+
+    destination_address = self.get_destination_label(26, include_locale = True)
+    etc, used_space = "", 0
+
+    if listing_type == entries.ListingType.IP_ADDRESS:
+      if width > used_space + 42 and CONFIG["features.connection.showColumn.fingerprint"]:
+        # show fingerprint (column width: 42 characters)
+
+        etc += "%-40s  " % self.foreign.get_fingerprint()
+        used_space += 42
+
+      if width > used_space + 10 and CONFIG["features.connection.showColumn.nickname"]:
+        # show nickname (column width: remainder)
+
+        nickname_space = width - used_space
+        nickname_label = ui_tools.crop_str(self.foreign.get_nickname(), nickname_space, 0)
+        etc += ("%%-%is  " % nickname_space) % nickname_label
+        used_space += nickname_space + 2
+    elif listing_type == entries.ListingType.HOSTNAME:
+      if width > used_space + 28 and CONFIG["features.connection.showColumn.destination"]:
+        # show destination ip/port/locale (column width: 28 characters)
+        etc += "%-26s  " % destination_address
+        used_space += 28
+
+      if width > used_space + 42 and CONFIG["features.connection.showColumn.fingerprint"]:
+        # show fingerprint (column width: 42 characters)
+        etc += "%-40s  " % self.foreign.get_fingerprint()
+        used_space += 42
+
+      if width > used_space + 17 and CONFIG["features.connection.showColumn.nickname"]:
+        # show nickname (column width: min 17 characters, uses half of the remainder)
+        nickname_space = 15 + (width - (used_space + 17)) / 2
+        nickname_label = ui_tools.crop_str(self.foreign.get_nickname(), nickname_space, 0)
+        etc += ("%%-%is  " % nickname_space) % nickname_label
+        used_space += (nickname_space + 2)
+    elif listing_type == entries.ListingType.FINGERPRINT:
+      if width > used_space + 17:
+        # show nickname (column width: min 17 characters, consumes any remaining space)
+
+        nickname_space = width - used_space - 2
+
+        # if there's room then also show a column with the destination
+        # ip/port/locale (column width: 28 characters)
+
+        is_locale_included = width > used_space + 45
+        is_locale_included &= CONFIG["features.connection.showColumn.destination"]
+
+        if is_locale_included:
+          nickname_space -= 28
+
+        if CONFIG["features.connection.showColumn.nickname"]:
+          nickname_label = ui_tools.crop_str(self.foreign.get_nickname(), nickname_space, 0)
+          etc += ("%%-%is  " % nickname_space) % nickname_label
+          used_space += nickname_space + 2
+
+        if is_locale_included:
+          etc += "%-26s  " % destination_address
+          used_space += 28
+    else:
+      if width > used_space + 42 and CONFIG["features.connection.showColumn.fingerprint"]:
+        # show fingerprint (column width: 42 characters)
+        etc += "%-40s  " % self.foreign.get_fingerprint()
+        used_space += 42
+
+      if width > used_space + 28 and CONFIG["features.connection.showColumn.destination"]:
+        # show destination ip/port/locale (column width: 28 characters)
+        etc += "%-26s  " % destination_address
+        used_space += 28
+
+    return ("%%-%is" % width) % etc
+
+  def _get_listing_content(self, width, listing_type):
+    """
+    Provides the source, destination, and extra info for our listing.
+
+    Arguments:
+      width       - maximum length of the line
+      listing_type - primary attribute we're listing connections by
+    """
+
+    conn = tor_tools.get_conn()
+    my_type = self.get_type()
+    destination_address = self.get_destination_label(26, include_locale = True)
+
+    # The required widths are the sum of the following:
+    # - room for LABEL_FORMAT and LABEL_MIN_PADDING (11 characters)
+    # - base data for the listing
+    # - that extra field plus any previous
+
+    used_space = len(LABEL_FORMAT % tuple([""] * 4)) + LABEL_MIN_PADDING
+    local_port = ":%s" % self.local.get_port() if self.include_port else ""
+
+    src, dst, etc = "", "", ""
+
+    if listing_type == entries.ListingType.IP_ADDRESS:
+      my_external_address = conn.get_info("address", self.local.get_address())
+      address_differ = my_external_address != self.local.get_address()
+
+      # Expanding doesn't make sense if the connection isn't actually
+      # going through Tor's external IP address. As there isn't a known
+      # method for checking if it is, we're checking the type instead.
+      #
+      # This isn't entirely correct. It might be a better idea to check if
+      # the source and destination addresses are both private, but that might
+      # not be perfectly reliable either.
+
+      is_expansion_type = not my_type in (Category.SOCKS, Category.HIDDEN, Category.CONTROL)
+
+      if is_expansion_type:
+        src_address = my_external_address + local_port
+      else:
+        src_address = self.local.get_address() + local_port
+
+      if my_type in (Category.SOCKS, Category.CONTROL):
+        # Like inbound connections these need their source and destination to
+        # be swapped. However, this only applies when listing by IP or hostname
+        # (their fingerprint and nickname are both for us). Reversing the
+        # fields here to keep the same column alignments.
+
+        src = "%-21s" % destination_address
+        dst = "%-26s" % src_address
+      else:
+        src = "%-21s" % src_address  # ip:port = max of 21 characters
+        dst = "%-26s" % destination_address  # ip:port (xx) = max of 26 characters
+
+      used_space += len(src) + len(dst)  # base data requires 47 characters
+
+      # Showing the fingerprint (which has the width of 42) has priority over
+      # an expanded address field. Hence check if we either have space for
+      # both or wouldn't be showing the fingerprint regardless.
+
+      is_expanded_address_visible = width > used_space + 28
+
+      if is_expanded_address_visible and CONFIG["features.connection.showColumn.fingerprint"]:
+        is_expanded_address_visible = width < used_space + 42 or width > used_space + 70
+
+      if address_differ and is_expansion_type and is_expanded_address_visible and self.include_expanded_addresses and CONFIG["features.connection.showColumn.expandedIp"]:
+        # include the internal address in the src (extra 28 characters)
+
+        internal_address = self.local.get_address() + local_port
+
+        # If this is an inbound connection then reverse ordering so it's:
+        # <foreign> --> <external> --> <internal>
+        # when the src and dst are swapped later
+
+        if my_type == Category.INBOUND:
+          src = "%-21s  -->  %s" % (src, internal_address)
+        else:
+          src = "%-21s  -->  %s" % (internal_address, src)
+
+        used_space += 28
+
+      etc = self.get_etc_content(width - used_space, listing_type)
+      used_space += len(etc)
+    elif listing_type == entries.ListingType.HOSTNAME:
+      # 15 characters for source, and a min of 40 reserved for the destination
+      # TODO: when actually functional the src and dst need to be swapped for
+      # SOCKS and CONTROL connections
+
+      src = "localhost%-6s" % local_port
+      used_space += len(src)
+      min_hostname_space = 40
+
+      etc = self.get_etc_content(width - used_space - min_hostname_space, listing_type)
+      used_space += len(etc)
+
+      hostname_space = width - used_space
+      used_space = width  # prevents padding at the end
+
+      if self.is_private():
+        dst = ("%%-%is" % hostname_space) % "<scrubbed>"
+      else:
+        hostname = self.foreign.get_hostname(self.foreign.get_address())
+        port_label = ":%-5s" % self.foreign.get_port() if self.include_port else ""
+
+        # truncates long hostnames and sets dst to <hostname>:<port>
+
+        hostname = ui_tools.crop_str(hostname, hostname_space, 0)
+        dst = ("%%-%is" % hostname_space) % (hostname + port_label)
+    elif listing_type == entries.ListingType.FINGERPRINT:
+      src = "localhost"
+
+      if my_type == Category.CONTROL:
+        dst = "localhost"
+      else:
+        dst = self.foreign.get_fingerprint()
+
+      dst = "%-40s" % dst
+
+      used_space += len(src) + len(dst)  # base data requires 49 characters
+
+      etc = self.get_etc_content(width - used_space, listing_type)
+      used_space += len(etc)
+    else:
+      # base data requires 50 min characters
+      src = self.local.get_nickname()
+
+      if my_type == Category.CONTROL:
+        dst = self.local.get_nickname()
+      else:
+        dst = self.foreign.get_nickname()
+
+      min_base_space = 50
+
+      etc = self.get_etc_content(width - used_space - min_base_space, listing_type)
+      used_space += len(etc)
+
+      base_space = width - used_space
+      used_space = width  # prevents padding at the end
+
+      if len(src) + len(dst) > base_space:
+        src = ui_tools.crop_str(src, base_space / 3)
+        dst = ui_tools.crop_str(dst, base_space - len(src))
+
+      # pads dst entry to its max space
+
+      dst = ("%%-%is" % (base_space - len(src))) % dst
+
+    if my_type == Category.INBOUND:
+      src, dst = dst, src
+
+    padding = " " * (width - used_space + LABEL_MIN_PADDING)
+
+    return LABEL_FORMAT % (src, dst, etc, padding)
+
+  def _get_detail_content(self, width):
+    """
+    Provides a list with detailed information for this connection.
+
+    Arguments:
+      width - max length of lines
+    """
+
+    lines = [""] * 7
+    lines[0] = "address: %s" % self.get_destination_label(width - 11)
+    lines[1] = "locale: %s" % ("??" if self.is_private() else self.foreign.get_locale("??"))
+
+    # Remaining data concerns the consensus results, with three possible cases:
+    # - if there's a single match then display its details
+    # - if there's multiple potential relays then list all of the combinations
+    #   of ORPorts / Fingerprints
+    # - if no consensus data is available then say so (probably a client or
+    #   exit connection)
+
+    fingerprint = self.foreign.get_fingerprint()
+    conn = tor_tools.get_conn()
+
+    if fingerprint != "UNKNOWN":
+      # single match - display information available about it
+
+      ns_entry = conn.get_consensus_entry(fingerprint)
+      desc_entry = conn.get_descriptor_entry(fingerprint)
+
+      # append the fingerprint to the second line
+
+      lines[1] = "%-13sfingerprint: %s" % (lines[1], fingerprint)
+
+      if ns_entry:
+        # example consensus entry:
+        # r murble R8sCM1ar1sS2GulQYFVmvN95xsk RJr6q+wkTFG+ng5v2bdCbVVFfA4 2011-02-21 00:25:32 195.43.157.85 443 0
+        # s Exit Fast Guard Named Running Stable Valid
+        # w Bandwidth=2540
+        # p accept 20-23,43,53,79-81,88,110,143,194,443
+
+        ns_lines = ns_entry.split("\n")
+
+        first_line_comp = ns_lines[0].split(" ")
+
+        if len(first_line_comp) >= 9:
+          _, nickname, _, _, published_date, published_time, _, or_port, dir_port = first_line_comp[:9]
+        else:
+          nickname, published_date, published_time, or_port, dir_port = "", "", "", "", ""
+
+        flags = "unknown"
+
+        if len(ns_lines) >= 2 and ns_lines[1].startswith("s "):
+          flags = ns_lines[1][2:]
+
+        exit_policy = conn.get_relay_exit_policy(fingerprint)
+
+        if exit_policy:
+          policy_label = exit_policy.summary()
+        else:
+          policy_label = "unknown"
+
+        dir_port_label = "" if dir_port == "0" else "dirport: %s" % dir_port
+        lines[2] = "nickname: %-25s orport: %-10s %s" % (nickname, or_port, dir_port_label)
+        lines[3] = "published: %s %s" % (published_time, published_date)
+        lines[4] = "flags: %s" % flags.replace(" ", ", ")
+        lines[5] = "exit policy: %s" % policy_label
+
+      if desc_entry:
+        tor_version, platform, contact = "", "", ""
+
+        for desc_line in desc_entry.split("\n"):
+          if desc_line.startswith("platform"):
+            # has the tor version and platform, ex:
+            # platform Tor 0.2.1.29 (r318f470bc5f2ad43) on Linux x86_64
+
+            tor_version = desc_line[13:desc_line.find(" ", 13)]
+            platform = desc_line[desc_line.rfind(" on ") + 4:]
+          elif desc_line.startswith("contact"):
+            contact = desc_line[8:]
+
+            # clears up some common address obfuscation
+
+            for alias in (" at ", " AT "):
+              contact = contact.replace(alias, "@")
+
+            for alias in (" dot ", " DOT "):
+              contact = contact.replace(alias, ".")
+
+            break  # contact lines come after the platform
+
+        lines[3] = "%-35s os: %-14s version: %s" % (lines[3], platform, tor_version)
+
+        # contact information is an optional field
+
+        if contact:
+          lines[6] = "contact: %s" % contact
+    else:
+      all_matches = conn.get_relay_fingerprint(self.foreign.get_address(), get_all_matches = True)
+
+      if all_matches:
+        # multiple matches
+        lines[2] = "Multiple matches, possible fingerprints are:"
+
+        for i in range(len(all_matches)):
+          is_last_line = i == 3
+
+          relay_port, relay_fingerprint = all_matches[i]
+          line_text = "%i. or port: %-5s fingerprint: %s" % (i, relay_port, relay_fingerprint)
+
+          # if there's multiple lines remaining at the end then give a count
+
+          remaining_relays = len(all_matches) - i
+
+          if is_last_line and remaining_relays > 1:
+            line_text = "... %i more" % remaining_relays
+
+          lines[3 + i] = line_text
+
+          if is_last_line:
+            break
+      else:
+        # no consensus entry for this ip address
+        lines[2] = "No consensus data found"
+
+    # crops any lines that are too long
+
+    for i in range(len(lines)):
+      lines[i] = ui_tools.crop_str(lines[i], width - 2)
+
+    return lines
+
+  def get_destination_label(self, max_length, include_locale = False, include_hostname = False):
+    """
+    Provides a short description of the destination. This is made up of two
+    components, the base <ip addr>:<port> and an extra piece of information in
+    parentheses. The IP address is scrubbed from private connections.
+
+    Extra information is...
+    - the port's purpose for exit connections
+    - the locale and/or hostname if set to do so, the address isn't private,
+      and isn't on the local network
+    - nothing otherwise
+
+    Arguments:
+      max_length       - maximum length of the string returned
+      include_locale   - possibly includes the locale
+      include_hostname - possibly includes the hostname
+    """
+
+    # the port and any port-derived data can be hidden by config or by unsetting include_port
+
+    include_port = self.include_port and (CONFIG["features.connection.showExitPort"] or self.get_type() != Category.EXIT)
+
+    # destination of the connection
+
+    address_label = "<scrubbed>" if self.is_private() else self.foreign.get_address()
+    port_label = ":%s" % self.foreign.get_port() if include_port else ""
+    destination_address = address_label + port_label
+
+    # Only append the extra info if there's at least a couple characters of
+    # space (this is what's needed for the country codes).
+
+    if len(destination_address) + 5 <= max_length:
+      space_available = max_length - len(destination_address) - 3
+
+      if self.get_type() == Category.EXIT and include_port:
+        purpose = connection.port_usage(self.foreign.get_port())
+
+        if purpose:
+          # BitTorrent is a common protocol to truncate, so just use "Torrent"
+          # if there's not enough room.
+
+          if len(purpose) > space_available and purpose == "BitTorrent":
+            purpose = "Torrent"
+
+          # crops with a hyphen if too long
+
+          purpose = ui_tools.crop_str(purpose, space_available, end_type = ui_tools.Ending.HYPHEN)
+
+          destination_address += " (%s)" % purpose
+      elif not connection.is_private_address(self.foreign.get_address()):
+        extra_info = []
+        conn = tor_tools.get_conn()
+
+        if include_locale and not conn.is_geoip_unavailable():
+          foreign_locale = self.foreign.get_locale("??")
+          extra_info.append(foreign_locale)
+          space_available -= len(foreign_locale) + 2
+
+        if include_hostname:
+          destination_hostname = self.foreign.get_hostname()
+
+          if destination_hostname:
+            # determines the full space available, taking into account the ", "
+            # dividers if there's multiple pieces of extra data
+
+            max_hostname_space = space_available - 2 * len(extra_info)
+            destination_hostname = ui_tools.crop_str(destination_hostname, max_hostname_space)
+            extra_info.append(destination_hostname)
+            space_available -= len(destination_hostname)
+
+        if extra_info:
+          destination_address += " (%s)" % ", ".join(extra_info)
+
+    return destination_address[:max_length]
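+
+# Illustrative only (not from the module): for a non-private exit connection
+# to 74.125.28.103 port 443, a call such as...
+#
+#   line.get_destination_label(30, include_locale = True)
+#
+# ...would be expected to yield "74.125.28.103:443 (HTTPS)", since for EXIT
+# entries the port's purpose takes precedence over the locale suffix.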
diff --git a/arm/connections/conn_panel.py b/arm/connections/conn_panel.py
new file mode 100644
index 0000000..5ccd880
--- /dev/null
+++ b/arm/connections/conn_panel.py
@@ -0,0 +1,668 @@
+"""
+Listing of the currently established connections tor has made.
+"""
+
+import re
+import time
+import curses
+import threading
+
+import arm.popups
+import arm.util.tracker
+
+from arm.connections import count_popup, descriptor_popup, entries, conn_entry, circ_entry
+from arm.util import panel, tor_tools, tracker, ui_tools
+
+from stem.control import State
+from stem.util import conf, connection, enum
+
+# height of the detail panel content, not counting top and bottom border
+
+DETAILS_HEIGHT = 7
+
+# listing types
+
+Listing = enum.Enum(("IP_ADDRESS", "IP Address"), "HOSTNAME", "FINGERPRINT", "NICKNAME")
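+# Note (assuming stem's usual enum behavior): plain keys get camel cased
+# display values ("HOSTNAME" => "Hostname"), so the ("IP_ADDRESS", "IP Address")
+# tuple above overrides what would otherwise render as "Ip Address".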
+
+
+def conf_handler(key, value):
+  if key == "features.connection.listing_type":
+    return conf.parse_enum(key, value, Listing)
+  elif key == "features.connection.refreshRate":
+    return max(1, value)
+  elif key == "features.connection.order":
+    return conf.parse_enum_csv(key, value[0], entries.SortAttr, 3)
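+
+# Illustrative only (not from the module): conf_handler clamps a nonsensical
+# refresh rate, e.g. conf_handler("features.connection.refreshRate", 0) comes
+# back as 1, so the panel's update loop can't spin without a delay.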
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.connection.resolveApps": True,
+  "features.connection.listing_type": Listing.IP_ADDRESS,
+  "features.connection.order": [
+    entries.SortAttr.CATEGORY,
+    entries.SortAttr.LISTING,
+    entries.SortAttr.UPTIME],
+  "features.connection.refreshRate": 5,
+  "features.connection.showIps": True,
+}, conf_handler)
+
+
+class ConnectionPanel(panel.Panel, threading.Thread):
+  """
+  Listing of connections tor is making, with information correlated against
+  the current consensus and other data sources.
+  """
+
+  def __init__(self, stdscr):
+    panel.Panel.__init__(self, stdscr, "connections", 0)
+    threading.Thread.__init__(self)
+    self.setDaemon(True)
+
+    # defaults our listing selection to fingerprints if ip address
+    # displaying is disabled
+    #
+    # TODO: This is a little sucky in that it won't work if showIps changes
+    # while we're running (... but arm doesn't allow for that atm)
+
+    if not CONFIG["features.connection.showIps"] and CONFIG["features.connection.listing_type"] == Listing.IP_ADDRESS:
+      arm_config = conf.get_config("arm")
+      arm_config.set("features.connection.listing_type", Listing.keys()[Listing.index_of(Listing.FINGERPRINT)])
+
+    self._scroller = ui_tools.Scroller(True)
+    self._title = "Connections:"  # title line of the panel
+    self._entries = []            # last fetched display entries
+    self._entry_lines = []        # individual lines rendered from the entries listing
+    self._show_details = False    # presents the details panel if true
+
+    self._last_update = -1        # time the content was last revised
+    self._is_tor_running = True   # indicates if tor is currently running or not
+    self._halt_time = None        # time when tor was stopped
+    self._halt = False            # terminates thread if true
+    self._cond = threading.Condition()  # used for pausing the thread
+    self.vals_lock = threading.RLock()
+
+    # Tracks exit port and client country statistics
+
+    self._client_locale_usage = {}
+    self._exit_port_usage = {}
+
+    # If we're a bridge and have been running for over a day then prepopulate
+    # with the last day's clients.
+
+    conn = tor_tools.get_conn()
+    bridge_clients = conn.get_info("status/clients-seen", None)
+
+    if bridge_clients:
+      # Response has a couple arguments...
+      # TimeStarted="2011-08-17 15:50:49" CountrySummary=us=16,de=8,uk=8
+
+      country_summary = None
+
+      for arg in bridge_clients.split():
+        if arg.startswith("CountrySummary="):
+          country_summary = arg[15:]
+          break
+
+      if country_summary:
+        for entry in country_summary.split(","):
+          if re.match("^..=[0-9]+$", entry):
+            locale, count = entry.split("=", 1)
+            self._client_locale_usage[locale] = int(count)
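+
+    # Illustrative only (not from the module): for the example response above
+    # the parsed result comes out as {'us': 16, 'de': 8, 'uk': 8}.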
+
+    # Last sampling received from the ConnectionResolver, used to detect when
+    # it changes.
+
+    self._last_resource_fetch = -1
+
+    # resolver for the command/pid associated with SOCKS, HIDDEN, and CONTROL connections
+
+    self._app_resolver = tracker.get_port_usage_tracker()
+
+    # rate limits app resolver queries to once per update
+
+    self.app_resolve_since_update = False
+
+    # mark the initially existing connection uptimes as being estimates
+
+    for entry in self._entries:
+      if isinstance(entry, conn_entry.ConnectionEntry):
+        entry.getLines()[0].is_initial_connection = True
+
+    # listens for when tor stops so we know to stop reflecting changes
+
+    conn.add_status_listener(self.tor_state_listener)
+
+  def tor_state_listener(self, controller, event_type, _):
+    """
+    Freezes the connection contents when Tor stops.
+    """
+
+    self._is_tor_running = event_type in (State.INIT, State.RESET)
+
+    if self._is_tor_running:
+      self._halt_time = None
+    else:
+      self._halt_time = time.time()
+
+    self.redraw(True)
+
+  def get_pause_time(self):
+    """
+    Provides the time Tor stopped if it isn't running. Otherwise this is the
+    time we were last paused.
+    """
+
+    if self._halt_time:
+      return self._halt_time
+    else:
+      return panel.Panel.get_pause_time(self)
+
+  def set_sort_order(self, ordering = None):
+    """
+    Sets the connection attributes we're sorting by and resorts the contents.
+
+    Arguments:
+      ordering - new ordering, if undefined then this resorts with the last
+                 set ordering
+    """
+
+    self.vals_lock.acquire()
+
+    if ordering:
+      arm_config = conf.get_config("arm")
+
+      ordering_keys = [entries.SortAttr.keys()[entries.SortAttr.index_of(v)] for v in ordering]
+      arm_config.set("features.connection.order", ", ".join(ordering_keys))
+
+    self._entries.sort(key = lambda i: (i.get_sort_values(CONFIG["features.connection.order"], self.get_listing_type())))
+
+    self._entry_lines = []
+
+    for entry in self._entries:
+      self._entry_lines += entry.getLines()
+
+    self.vals_lock.release()
+
+  def get_listing_type(self):
+    """
+    Provides the priority content we list connections by.
+    """
+
+    return CONFIG["features.connection.listing_type"]
+
+  def set_listing_type(self, listing_type):
+    """
+    Sets the priority information presented by the panel.
+
+    Arguments:
+      listing_type - Listing instance for the primary information to be shown
+    """
+
+    if self.get_listing_type() == listing_type:
+      return
+
+    self.vals_lock.acquire()
+
+    arm_config = conf.get_config("arm")
+    arm_config.set("features.connection.listing_type", Listing.keys()[Listing.index_of(listing_type)])
+
+    # if we're sorting by the listing then we need to resort
+
+    if entries.SortAttr.LISTING in CONFIG["features.connection.order"]:
+      self.set_sort_order()
+
+    self.vals_lock.release()
+
+  def is_clients_allowed(self):
+    """
+    True if client connections are permissible, false otherwise.
+    """
+
+    conn = tor_tools.get_conn()
+    return "Guard" in conn.get_my_flags([]) or conn.get_option("BridgeRelay", None) == "1"
+
+  def is_exits_allowed(self):
+    """
+    True if exit connections are permissible, false otherwise.
+    """
+
+    if not tor_tools.get_conn().get_option("ORPort", None):
+      return False  # no ORPort
+
+    policy = tor_tools.get_conn().get_exit_policy()
+
+    return policy and policy.is_exiting_allowed()
+
+  def show_sort_dialog(self):
+    """
+    Provides the sort dialog for our connections.
+    """
+
+    # set ordering for connection options
+
+    title_label = "Connection Ordering:"
+    options = list(entries.SortAttr)
+    old_selection = CONFIG["features.connection.order"]
+    option_colors = dict([(attr, entries.SORT_COLORS[attr]) for attr in options])
+    results = arm.popups.show_sort_dialog(title_label, options, old_selection, option_colors)
+
+    if results:
+      self.set_sort_order(results)
+
+  def handle_key(self, key):
+    self.vals_lock.acquire()
+
+    is_keystroke_consumed = True
+
+    if ui_tools.is_scroll_key(key):
+      page_height = self.get_preferred_size()[0] - 1
+
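+      # when the details pane is shown, it and its bottom border take up part of the page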
+      if self._show_details:
+        page_height -= (DETAILS_HEIGHT + 1)
+
+      is_changed = self._scroller.handle_key(key, self._entry_lines, page_height)
+
+      if is_changed:
+        self.redraw(True)
+    elif ui_tools.is_selection_key(key):
+      self._show_details = not self._show_details
+      self.redraw(True)
+    elif key == ord('s') or key == ord('S'):
+      self.show_sort_dialog()
+    elif key == ord('u') or key == ord('U'):
+      # provides a menu to pick the connection resolver
+
+      title = "Resolver Util:"
+      options = ["auto"] + list(connection.Resolver)
+      conn_resolver = arm.util.tracker.get_connection_tracker()
+
+      current_overwrite = conn_resolver.get_custom_resolver()
+
+      if current_overwrite is None:
+        old_selection = 0
+      else:
+        old_selection = options.index(current_overwrite)
+
+      selection = arm.popups.show_menu(title, options, old_selection)
+
+      # applies new setting
+
+      if selection != -1:
+        selected_option = options[selection] if selection != 0 else None
+        conn_resolver.set_custom_resolver(selected_option)
+    elif key == ord('l') or key == ord('L'):
+      # provides a menu to pick the primary information we list connections by
+
+      title = "List By:"
+      options = list(entries.ListingType)
+
+      # dropping the HOSTNAME listing type until we support displaying that content
+
+      options.remove(arm.connections.entries.ListingType.HOSTNAME)
+
+      old_selection = options.index(self.get_listing_type())
+      selection = arm.popups.show_menu(title, options, old_selection)
+
+      # applies new setting
+
+      if selection != -1:
+        self.set_listing_type(options[selection])
+    elif key == ord('d') or key == ord('D'):
+      # presents popup for raw consensus data
+      descriptor_popup.show_descriptor_popup(self)
+    elif (key == ord('c') or key == ord('C')) and self.is_clients_allowed():
+      count_popup.showCountDialog(count_popup.CountType.CLIENT_LOCALE, self._client_locale_usage)
+    elif (key == ord('e') or key == ord('E')) and self.is_exits_allowed():
+      count_popup.showCountDialog(count_popup.CountType.EXIT_PORT, self._exit_port_usage)
+    else:
+      is_keystroke_consumed = False
+
+    self.vals_lock.release()
+    return is_keystroke_consumed
+
+  def run(self):
+    """
+    Keeps connections listing updated, checking for new entries at a set rate.
+    """
+
+    last_draw = time.time() - 1
+
+    # Fetches our initial connection results. The wait is so this doesn't
+    # run during arm's interface initialization (otherwise there's a
+    # noticeable pause before the first redraw).
+
+    self._cond.acquire()
+    self._cond.wait(0.2)
+    self._cond.release()
+    self._update()             # populates initial entries
+    self._resolve_apps(False)  # resolves initial applications
+
+    while not self._halt:
+      current_time = time.time()
+
+      if self.is_paused() or not self._is_tor_running or current_time - last_draw < CONFIG["features.connection.refreshRate"]:
+        self._cond.acquire()
+
+        if not self._halt:
+          self._cond.wait(0.2)
+
+        self._cond.release()
+      else:
+        # updates content if there are new results, otherwise just redraws
+
+        self._update()
+        self.redraw(True)
+
+        # we may have missed multiple updates due to being paused, showing
+        # another panel, etc so last_draw might need to jump multiple ticks
+
+        draw_ticks = (time.time() - last_draw) / CONFIG["features.connection.refreshRate"]
+        last_draw += CONFIG["features.connection.refreshRate"] * draw_ticks
+
+  def get_help(self):
+    resolver_util = arm.util.tracker.get_connection_tracker().get_custom_resolver()
+
+    if resolver_util is None:
+      resolver_util = "auto"
+
+    options = []
+    options.append(("up arrow", "scroll up a line", None))
+    options.append(("down arrow", "scroll down a line", None))
+    options.append(("page up", "scroll up a page", None))
+    options.append(("page down", "scroll down a page", None))
+    options.append(("enter", "show connection details", None))
+    options.append(("d", "raw consensus descriptor", None))
+
+    if self.is_clients_allowed():
+      options.append(("c", "client locale usage summary", None))
+
+    if self.is_exits_allowed():
+      options.append(("e", "exit port usage summary", None))
+
+    options.append(("l", "listed identity", self.get_listing_type().lower()))
+    options.append(("s", "sort ordering", None))
+    options.append(("u", "resolving utility", resolver_util))
+    return options
+
+  def get_selection(self):
+    """
+    Provides the currently selected connection entry.
+    """
+
+    return self._scroller.get_cursor_selection(self._entry_lines)
+
+  def draw(self, width, height):
+    self.vals_lock.acquire()
+
+    # if we don't have any contents then refuse to show details
+
+    if not self._entries:
+      self._show_details = False
+
+    # extra line when showing the detail panel is for the bottom border
+
+    detail_panel_offset = DETAILS_HEIGHT + 1 if self._show_details else 0
+    is_scrollbar_visible = len(self._entry_lines) > height - detail_panel_offset - 1
+
+    scroll_location = self._scroller.get_scroll_location(self._entry_lines, height - detail_panel_offset - 1)
+    cursor_selection = self.get_selection()
+
+    # draws the detail panel if currently displaying it
+
+    if self._show_details and cursor_selection:
+      # This is a solid border unless the scrollbar is visible, in which case a
+      # 'T' pipe connects the border to the bar.
+
+      ui_tools.draw_box(self, 0, 0, width, DETAILS_HEIGHT + 2)
+
+      if is_scrollbar_visible:
+        self.addch(DETAILS_HEIGHT + 1, 1, curses.ACS_TTEE)
+
+      draw_entries = cursor_selection.get_details(width)
+
+      for i in range(min(len(draw_entries), DETAILS_HEIGHT)):
+        self.addstr(1 + i, 2, draw_entries[i][0], draw_entries[i][1])
+
+    # title label with connection counts
+
+    if self.is_title_visible():
+      title = "Connection Details:" if self._show_details else self._title
+      self.addstr(0, 0, title, curses.A_STANDOUT)
+
+    scroll_offset = 0
+
+    if is_scrollbar_visible:
+      scroll_offset = 2
+      self.add_scroll_bar(scroll_location, scroll_location + height - detail_panel_offset - 1, len(self._entry_lines), 1 + detail_panel_offset)
+
+    if self.is_paused() or not self._is_tor_running:
+      current_time = self.get_pause_time()
+    else:
+      current_time = time.time()
+
+    for line_number in range(scroll_location, len(self._entry_lines)):
+      entry_line = self._entry_lines[line_number]
+
+      # if this is an unresolved SOCKS, HIDDEN, or CONTROL entry then queue up
+      # resolution for the applications they belong to
+
+      if isinstance(entry_line, conn_entry.ConnectionLine) and entry_line.is_unresolved_application():
+        self._resolve_apps()
+
+      # highlighting if this is the selected line
+
+      extra_format = curses.A_STANDOUT if entry_line == cursor_selection else curses.A_NORMAL
+
+      draw_line = line_number + detail_panel_offset + 1 - scroll_location
+
+      prefix = entry_line.get_listing_prefix()
+
+      for i in range(len(prefix)):
+        self.addch(draw_line, scroll_offset + i, prefix[i])
+
+      x_offset = scroll_offset + len(prefix)
+      draw_entry = entry_line.get_listing_entry(width - scroll_offset - len(prefix), current_time, self.get_listing_type())
+
+      for msg, attr in draw_entry:
+        attr |= extra_format
+        self.addstr(draw_line, x_offset, msg, attr)
+        x_offset += len(msg)
+
+      if draw_line >= height:
+        break
+
+    self.vals_lock.release()
+
+  def stop(self):
+    """
+    Halts further resolutions and terminates the thread.
+    """
+
+    self._cond.acquire()
+    self._halt = True
+    self._cond.notifyAll()
+    self._cond.release()
+
+  def _update(self):
+    """
+    Fetches the newest resolved connections.
+    """
+
+    self.app_resolve_since_update = False
+
+    # if we don't have an initialized resolver then this is a no-op
+
+    if not arm.util.tracker.get_connection_tracker().is_alive():
+      return
+
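+    # the run counter identifies which resolver sampling these results belong
+    # to (recorded below as _last_resource_fetch once they're processed)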
+    conn_resolver = arm.util.tracker.get_connection_tracker()
+    current_resolution_count = conn_resolver.run_counter()
+
+    self.vals_lock.acquire()
+
+    new_entries = []  # the new results we'll display
+
+    # Fetches new connections and client circuits...
+    # new_connections  [(local ip, local port, foreign ip, foreign port)...]
+    # new_circuits     {circuit_id => (status, purpose, path)...}
+
+    new_connections = [(conn.local_address, conn.local_port, conn.remote_address, conn.remote_port) for conn in conn_resolver.get_connections()]
+    new_circuits = {}
+
+    for circuit_id, status, purpose, path in tor_tools.get_conn().get_circuits():
+      # Skips established single-hop circuits (these are for directory
+      # fetches, not client circuits)
+
+      if not (status == "BUILT" and len(path) == 1):
+        new_circuits[circuit_id] = (status, purpose, path)
+
+    # Populates new_entries with any of our old entries that still exist.
+    # This is both for performance and to keep from resetting the uptime
+    # attributes. Note that CircEntries are a ConnectionEntry subclass so
+    # we need to check for them first.
+
+    for old_entry in self._entries:
+      if isinstance(old_entry, circ_entry.CircEntry):
+        new_entry = new_circuits.get(old_entry.circuit_id)
+
+        if new_entry:
+          old_entry.update(new_entry[0], new_entry[2])
+          new_entries.append(old_entry)
+          del new_circuits[old_entry.circuit_id]
+      elif isinstance(old_entry, conn_entry.ConnectionEntry):
+        connection_line = old_entry.getLines()[0]
+        conn_attr = (connection_line.local.get_address(), connection_line.local.get_port(),
+                     connection_line.foreign.get_address(), connection_line.foreign.get_port())
+
+        if conn_attr in new_connections:
+          new_entries.append(old_entry)
+          new_connections.remove(conn_attr)
+
+    # Reset any display attributes for the entries we're keeping
+
+    for entry in new_entries:
+      entry.reset_display()
+
+    # Adds any new connection and circuit entries.
+
+    for local_address, local_port, remote_address, remote_port in new_connections:
+      new_conn_entry = conn_entry.ConnectionEntry(local_address, local_port, remote_address, remote_port)
+      new_conn_line = new_conn_entry.getLines()[0]
+
+      if new_conn_line.get_type() != conn_entry.Category.CIRCUIT:
+        new_entries.append(new_conn_entry)
+
+        # updates exit port and client locale usage information
+        if new_conn_line.is_private():
+          if new_conn_line.get_type() == conn_entry.Category.INBOUND:
+            # client connection, update locale information
+
+            client_locale = new_conn_line.foreign.get_locale()
+
+            if client_locale:
+              self._client_locale_usage[client_locale] = self._client_locale_usage.get(client_locale, 0) + 1
+          elif new_conn_line.get_type() == conn_entry.Category.EXIT:
+            exit_port = new_conn_line.foreign.get_port()
+            self._exit_port_usage[exit_port] = self._exit_port_usage.get(exit_port, 0) + 1
+
+    for circuit_id in new_circuits:
+      status, purpose, path = new_circuits[circuit_id]
+      new_entries.append(circ_entry.CircEntry(circuit_id, status, purpose, path))
+
+    # Counts the relays in each of the categories. This also flushes the
+    # type cache for all of the connections (in case it's changed since last
+    # fetched).
+
+    category_types = list(conn_entry.Category)
+    type_counts = dict((type, 0) for type in category_types)
+
+    for entry in new_entries:
+      if isinstance(entry, conn_entry.ConnectionEntry):
+        type_counts[entry.getLines()[0].get_type()] += 1
+      elif isinstance(entry, circ_entry.CircEntry):
+        type_counts[conn_entry.Category.CIRCUIT] += 1
+
+    # makes labels for all the categories with connections (ie,
+    # "21 outbound", "1 control", etc)
+
+    count_labels = []
+
+    for category in category_types:
+      if type_counts[category] > 0:
+        count_labels.append("%i %s" % (type_counts[category], category.lower()))
+
+    if count_labels:
+      self._title = "Connections (%s):" % ", ".join(count_labels)
+    else:
+      self._title = "Connections:"
+
+    self._entries = new_entries
+
+    self._entry_lines = []
+
+    for entry in self._entries:
+      self._entry_lines += entry.getLines()
+
+    self.set_sort_order()
+    self._last_resource_fetch = current_resolution_count
+    self.vals_lock.release()
+
+  def _resolve_apps(self, flag_query = True):
+    """
+    Triggers an asynchronous query for all unresolved SOCKS, HIDDEN, and
+    CONTROL entries.
+
+    Arguments:
+      flag_query - if true, sets a flag so further calls are ignored until
+                   the next update
+    """
+
+    if self.app_resolve_since_update or not CONFIG["features.connection.resolveApps"]:
+      return
+
+    unresolved_lines = [l for l in self._entry_lines if isinstance(l, conn_entry.ConnectionLine) and l.is_unresolved_application()]
+
+    # get the ports used for unresolved applications
+
+    app_ports = []
+
+    for line in unresolved_lines:
+      app_conn = line.local if line.get_type() == conn_entry.Category.HIDDEN else line.foreign
+      app_ports.append(app_conn.get_port())
+
+    # Queue up resolution for the unresolved ports (skips if it's still working
+    # on the last query).
+
+    if app_ports and not self._app_resolver.is_alive():
+      self._app_resolver.get_processes_using_ports(app_ports)
+
+    # Fetches results. If the query finishes quickly then this is what we just
+    # asked for, otherwise these belong to an earlier resolution.
+    #
+    # The application resolver might have given up querying (for instance, if
+    # the lsof lookups aren't working on this platform or lacks permissions).
+    # The is_application_resolving flag lets the unresolved entries indicate if there's
+    # a lookup in progress for them or not.
+
+    time.sleep(0.2)  # TODO: previous resolver only blocked while awaiting a lookup
+    app_results = self._app_resolver.get_processes_using_ports(app_ports)
+
+    for line in unresolved_lines:
+      is_local = line.get_type() == conn_entry.Category.HIDDEN
+      line_port = line.local.get_port() if is_local else line.foreign.get_port()
+
+      if line_port in app_results:
+        # sets application attributes if there's a result with this as the
+        # inbound port
+
+        for inbound_port, outbound_port, cmd, pid in app_results[line_port]:
+          app_port = outbound_port if is_local else inbound_port
+
+          if line_port == app_port:
+            line.application_name = cmd
+            line.application_pid = pid
+            line.is_application_resolving = False
+      else:
+        line.is_application_resolving = self._app_resolver.is_alive()
+
+    if flag_query:
+      self.app_resolve_since_update = True
diff --git a/arm/connections/countPopup.py b/arm/connections/countPopup.py
deleted file mode 100644
index 6f561fd..0000000
--- a/arm/connections/countPopup.py
+++ /dev/null
@@ -1,113 +0,0 @@
-"""
-Provides a dialog with client locale or exiting port counts.
-"""
-
-import curses
-import operator
-
-import arm.controller
-import arm.popups
-
-from arm.util import uiTools
-
-from stem.util import connection, enum, log
-
-CountType = enum.Enum("CLIENT_LOCALE", "EXIT_PORT")
-EXIT_USAGE_WIDTH = 15
-
-
-def showCountDialog(count_type, counts):
-  """
-  Provides a dialog with bar graphs and percentages for the given set of
-  counts. Pressing any key closes the dialog.
-
-  Arguments:
-    count_type - type of counts being presented
-    counts    - mapping of labels to counts
-  """
-
-  is_no_stats = not counts
-  no_stats_msg = "Usage stats aren't available yet, press any key..."
-
-  if is_no_stats:
-    popup, width, height = arm.popups.init(3, len(no_stats_msg) + 4)
-  else:
-    popup, width, height = arm.popups.init(4 + max(1, len(counts)), 80)
-
-  if not popup:
-    return
-
-  try:
-    control = arm.controller.get_controller()
-
-    popup.win.box()
-
-    # dialog title
-
-    if count_type == CountType.CLIENT_LOCALE:
-      title = "Client Locales"
-    elif count_type == CountType.EXIT_PORT:
-      title = "Exiting Port Usage"
-    else:
-      title = ""
-      log.warn("Unrecognized count type: %s" % count_type)
-
-    popup.addstr(0, 0, title, curses.A_STANDOUT)
-
-    if is_no_stats:
-      popup.addstr(1, 2, no_stats_msg, curses.A_BOLD | uiTools.get_color("cyan"))
-    else:
-      sorted_counts = sorted(counts.iteritems(), key=operator.itemgetter(1))
-      sorted_counts.reverse()
-
-      # constructs string formatting for the max key and value display width
-
-      key_width, val_width, value_total = 3, 1, 0
-
-      for k, v in sorted_counts:
-        key_width = max(key_width, len(k))
-        val_width = max(val_width, len(str(v)))
-        value_total += v
-
-      # extra space since we're adding usage informaion
-
-      if count_type == CountType.EXIT_PORT:
-        key_width += EXIT_USAGE_WIDTH
-
-      label_format = "%%-%is %%%ii (%%%%%%-2i)" % (key_width, val_width)
-
-      for i in range(height - 4):
-        k, v = sorted_counts[i]
-
-        # includes a port usage column
-
-        if count_type == CountType.EXIT_PORT:
-          usage = connection.port_usage(k)
-
-          if usage:
-            key_format = "%%-%is   %%s" % (key_width - EXIT_USAGE_WIDTH)
-            k = key_format % (k, usage[:EXIT_USAGE_WIDTH - 3])
-
-        label = label_format % (k, v, v * 100 / value_total)
-        popup.addstr(i + 1, 2, label, curses.A_BOLD | uiTools.get_color("green"))
-
-        # All labels have the same size since they're based on the max widths.
-        # If this changes then this'll need to be the max label width.
-
-        label_width = len(label)
-
-        # draws simple bar graph for percentages
-
-        fill_width = v * (width - 4 - label_width) / value_total
-
-        for j in range(fill_width):
-          popup.addstr(i + 1, 3 + label_width + j, " ", curses.A_STANDOUT | uiTools.get_color("red"))
-
-      popup.addstr(height - 2, 2, "Press any key...")
-
-    popup.win.refresh()
-
-    curses.cbreak()
-    control.get_screen().getch()
-  finally:
-    arm.popups.finalize()
diff --git a/arm/connections/count_popup.py b/arm/connections/count_popup.py
new file mode 100644
index 0000000..b764c05
--- /dev/null
+++ b/arm/connections/count_popup.py
@@ -0,0 +1,113 @@
+"""
+Provides a dialog with client locale or exiting port counts.
+"""
+
+import curses
+import operator
+
+import arm.controller
+import arm.popups
+
+from arm.util import ui_tools
+
+from stem.util import connection, enum, log
+
+CountType = enum.Enum("CLIENT_LOCALE", "EXIT_PORT")
+EXIT_USAGE_WIDTH = 15
+
+
+def showCountDialog(count_type, counts):
+  """
+  Provides a dialog with bar graphs and percentages for the given set of
+  counts. Pressing any key closes the dialog.
+
+  Arguments:
+    count_type - type of counts being presented
+    counts     - mapping of labels to counts
+  """
+
+  is_no_stats = not counts
+  no_stats_msg = "Usage stats aren't available yet, press any key..."
+
+  if is_no_stats:
+    popup, width, height = arm.popups.init(3, len(no_stats_msg) + 4)
+  else:
+    popup, width, height = arm.popups.init(4 + max(1, len(counts)), 80)
+
+  if not popup:
+    return
+
+  try:
+    control = arm.controller.get_controller()
+
+    popup.win.box()
+
+    # dialog title
+
+    if count_type == CountType.CLIENT_LOCALE:
+      title = "Client Locales"
+    elif count_type == CountType.EXIT_PORT:
+      title = "Exiting Port Usage"
+    else:
+      title = ""
+      log.warn("Unrecognized count type: %s" % count_type)
+
+    popup.addstr(0, 0, title, curses.A_STANDOUT)
+
+    if is_no_stats:
+      popup.addstr(1, 2, no_stats_msg, curses.A_BOLD | ui_tools.get_color("cyan"))
+    else:
+      sorted_counts = sorted(counts.iteritems(), key=operator.itemgetter(1))
+      sorted_counts.reverse()
+
+      # constructs string formatting for the max key and value display width
+
+      key_width, val_width, value_total = 3, 1, 0
+
+      for k, v in sorted_counts:
+        key_width = max(key_width, len(k))
+        val_width = max(val_width, len(str(v)))
+        value_total += v
+
+      # extra space since we're adding usage information
+
+      if count_type == CountType.EXIT_PORT:
+        key_width += EXIT_USAGE_WIDTH
+
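+      # with a key_width of 20 and val_width of 3, for instance, this becomes
+      # '%-20s %3i (%%%-2i)': left-aligned key, right-aligned count, and the
+      # percentage of the total rendered like '(%23)'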
+      label_format = "%%-%is %%%ii (%%%%%%-2i)" % (key_width, val_width)
+
+      for i in range(height - 4):
+        k, v = sorted_counts[i]
+
+        # includes a port usage column
+
+        if count_type == CountType.EXIT_PORT:
+          usage = connection.port_usage(k)
+
+          if usage:
+            key_format = "%%-%is   %%s" % (key_width - EXIT_USAGE_WIDTH)
+            k = key_format % (k, usage[:EXIT_USAGE_WIDTH - 3])
+
+        label = label_format % (k, v, v * 100 / value_total)
+        popup.addstr(i + 1, 2, label, curses.A_BOLD | ui_tools.get_color("green"))
+
+        # All labels have the same size since they're based on the max widths.
+        # If this changes then this'll need to be the max label width.
+
+        label_width = len(label)
+
+        # draws simple bar graph for percentages
+
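+        # each bar fills the space remaining after the label in proportion to
+        # this count's share of the total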
+        fill_width = v * (width - 4 - label_width) / value_total
+
+        for j in range(fill_width):
+          popup.addstr(i + 1, 3 + label_width + j, " ", curses.A_STANDOUT | ui_tools.get_color("red"))
+
+      popup.addstr(height - 2, 2, "Press any key...")
+
+    popup.win.refresh()
+
+    curses.cbreak()
+    control.get_screen().getch()
+  finally:
+    arm.popups.finalize()
diff --git a/arm/connections/descriptorPopup.py b/arm/connections/descriptorPopup.py
deleted file mode 100644
index 3404c92..0000000
--- a/arm/connections/descriptorPopup.py
+++ /dev/null
@@ -1,272 +0,0 @@
-"""
-Popup providing the raw descriptor and consensus information for a relay.
-"""
-
-import math
-import curses
-
-import arm.popups
-import arm.connections.connEntry
-
-from arm.util import panel, torTools, uiTools
-
-# field keywords used to identify areas for coloring
-
-LINE_NUM_COLOR = "yellow"
-HEADER_COLOR = "cyan"
-HEADER_PREFIX = ["ns/id/", "desc/id/"]
-
-SIG_COLOR = "red"
-SIG_START_KEYS = ["-----BEGIN RSA PUBLIC KEY-----", "-----BEGIN SIGNATURE-----"]
-SIG_END_KEYS = ["-----END RSA PUBLIC KEY-----", "-----END SIGNATURE-----"]
-
-UNRESOLVED_MSG = "No consensus data available"
-ERROR_MSG = "Unable to retrieve data"
-
-
-def show_descriptor_popup(conn_panel):
-  """
-  Presents consensus descriptor in popup window with the following controls:
-  Up, Down, Page Up, Page Down - scroll descriptor
-  Right, Left - next / previous connection
-  Enter, Space, d, D - close popup
-
-  Arguments:
-    conn_panel - connection panel providing the dialog
-  """
-
-  # hides the title of the connection panel
-
-  conn_panel.set_title_visible(False)
-  conn_panel.redraw(True)
-
-  control = arm.controller.get_controller()
-  panel.CURSES_LOCK.acquire()
-  is_done = False
-
-  try:
-    while not is_done:
-      selection = conn_panel.get_selection()
-
-      if not selection:
-        break
-
-      fingerprint = selection.foreign.get_fingerprint()
-
-      if fingerprint == "UNKNOWN":
-        fingerprint = None
-
-      display_text = get_display_text(fingerprint)
-      display_color = arm.connections.connEntry.CATEGORY_COLOR[selection.get_type()]
-      show_line_number = fingerprint is not None
-
-      # determines the maximum popup size the display_text can fill
-
-      popup_height, popup_width = get_preferred_size(display_text, conn_panel.max_x, show_line_number)
-
-      popup, _, height = arm.popups.init(popup_height, popup_width)
-
-      if not popup:
-        break
-
-      scroll, is_changed = 0, True
-
-      try:
-        while not is_done:
-          if is_changed:
-            draw(popup, fingerprint, display_text, display_color, scroll, show_line_number)
-            is_changed = False
-
-          key = control.get_screen().getch()
-
-          if uiTools.is_scroll_key(key):
-            # TODO: This is a bit buggy in that scrolling is by display_text
-            # lines rather than the displayed lines, causing issues when
-            # content wraps. The result is that we can't have a scrollbar and
-            # can't scroll to the bottom if there's a multi-line being
-            # displayed. However, trying to correct this introduces a big can
-            # of worms and after hours decided that this isn't worth the
-            # effort...
-
-            new_scroll = uiTools.get_scroll_position(key, scroll, height - 2, len(display_text))
-
-            if scroll != new_scroll:
-              scroll, is_changed = new_scroll, True
-          elif uiTools.is_selection_key(key) or key in (ord('d'), ord('D')):
-            is_done = True  # closes popup
-          elif key in (curses.KEY_LEFT, curses.KEY_RIGHT):
-            # navigation - pass on to conn_panel and recreate popup
-
-            conn_panel.handle_key(curses.KEY_UP if key == curses.KEY_LEFT else curses.KEY_DOWN)
-            break
-      finally:
-        arm.popups.finalize()
-  finally:
-    conn_panel.set_title_visible(True)
-    conn_panel.redraw(True)
-    panel.CURSES_LOCK.release()
-
-
-def get_display_text(fingerprint):
-  """
-  Provides the descriptor and consensus entry for a relay. This is a list of
-  lines to be displayed by the dialog.
-  """
-
-  if not fingerprint:
-    return [UNRESOLVED_MSG]
-
-  conn, description = torTools.get_conn(), []
-
-  description.append("ns/id/%s" % fingerprint)
-  consensus_entry = conn.get_consensus_entry(fingerprint)
-
-  if consensus_entry:
-    description += consensus_entry.split("\n")
-  else:
-    description += [ERROR_MSG, ""]
-
-  description.append("desc/id/%s" % fingerprint)
-  descriptor_entry = conn.get_descriptor_entry(fingerprint)
-
-  if descriptor_entry:
-    description += descriptor_entry.split("\n")
-  else:
-    description += [ERROR_MSG]
-
-  return description
-
-
-def get_preferred_size(text, max_width, show_line_number):
-  """
-  Provides the (height, width) tuple for the preferred size of the given text.
-  """
-
-  width, height = 0, len(text) + 2
-  line_number_width = int(math.log10(len(text))) + 1
-
-  for line in text:
-    # width includes content, line number field, and border
-
-    line_width = len(line) + 5
-
-    if show_line_number:
-      line_width += line_number_width
-
-    width = max(width, line_width)
-
-    # tracks number of extra lines that will be taken due to text wrap
-    height += (line_width - 2) / max_width
-
-  return (height, width)
-
-
-def draw(popup, fingerprint, display_text, display_color, scroll, show_line_number):
-  popup.win.erase()
-  popup.win.box()
-  x_offset = 2
-
-  if fingerprint:
-    title = "Consensus Descriptor (%s):" % fingerprint
-  else:
-    title = "Consensus Descriptor:"
-
-  popup.addstr(0, 0, title, curses.A_STANDOUT)
-
-  line_number_width = int(math.log10(len(display_text))) + 1
-  is_encryption_block = False   # flag indicating if we're currently displaying a key
-
-  # checks if first line is in an encryption block
-
-  for i in range(0, scroll):
-    line_text = display_text[i].strip()
-
-    if line_text in SIG_START_KEYS:
-      is_encryption_block = True
-    elif line_text in SIG_END_KEYS:
-      is_encryption_block = False
-
-  draw_line, page_height = 1, popup.max_y - 2
-
-  for i in range(scroll, scroll + page_height):
-    line_text = display_text[i].strip()
-    x_offset = 2
-
-    if show_line_number:
-      line_number_label = ("%%%ii" % line_number_width) % (i + 1)
-      line_number_format = curses.A_BOLD | uiTools.get_color(LINE_NUM_COLOR)
-
-      popup.addstr(draw_line, x_offset, line_number_label, line_number_format)
-      x_offset += line_number_width + 1
-
-    # Most consensus and descriptor lines are keyword/value pairs. Both are
-    # shown with the same color, but the keyword is bolded.
-
-    keyword, value = line_text, ""
-    draw_format = uiTools.get_color(display_color)
-
-    if line_text.startswith(HEADER_PREFIX[0]) or line_text.startswith(HEADER_PREFIX[1]):
-      keyword, value = line_text, ""
-      draw_format = uiTools.get_color(HEADER_COLOR)
-    elif line_text == UNRESOLVED_MSG or line_text == ERROR_MSG:
-      keyword, value = line_text, ""
-    elif line_text in SIG_START_KEYS:
-      keyword, value = line_text, ""
-      is_encryption_block = True
-      draw_format = uiTools.get_color(SIG_COLOR)
-    elif line_text in SIG_END_KEYS:
-      keyword, value = line_text, ""
-      is_encryption_block = False
-      draw_format = uiTools.get_color(SIG_COLOR)
-    elif is_encryption_block:
-      keyword, value = "", line_text
-      draw_format = uiTools.get_color(SIG_COLOR)
-    elif " " in line_text:
-      div_index = line_text.find(" ")
-      keyword, value = line_text[:div_index], line_text[div_index:]
-
-    display_queue = [(keyword, draw_format | curses.A_BOLD), (value, draw_format)]
-    cursor_location = x_offset
-
-    while display_queue:
-      msg, msg_format = display_queue.pop(0)
-
-      if not msg:
-        continue
-
-      max_msg_size = popup.max_x - 1 - cursor_location
-
-      if len(msg) >= max_msg_size:
-        # needs to split up the line
-
-        msg, remainder = uiTools.crop_str(msg, max_msg_size, None, end_type = None, get_remainder = True)
-
-        if x_offset == cursor_location and msg == "":
-          # first word is longer than the line
-
-          msg = uiTools.crop_str(remainder, max_msg_size)
-
-          if " " in remainder:
-            remainder = remainder.split(" ", 1)[1]
-          else:
-            remainder = ""
-
-        popup.addstr(draw_line, cursor_location, msg, msg_format)
-        cursor_location = x_offset
-
-        if remainder:
-          display_queue.insert(0, (remainder.strip(), msg_format))
-          draw_line += 1
-      else:
-        popup.addstr(draw_line, cursor_location, msg, msg_format)
-        cursor_location += len(msg)
-
-      if draw_line > page_height:
-        break
-
-    draw_line += 1
-
-    if draw_line > page_height:
-      break
-
-  popup.win.refresh()
diff --git a/arm/connections/descriptor_popup.py b/arm/connections/descriptor_popup.py
new file mode 100644
index 0000000..6dbad9c
--- /dev/null
+++ b/arm/connections/descriptor_popup.py
@@ -0,0 +1,272 @@
+"""
+Popup providing the raw descriptor and consensus information for a relay.
+"""
+
+import math
+import curses
+
+import arm.popups
+import arm.controller
+import arm.connections.conn_entry
+
+from arm.util import panel, tor_tools, ui_tools
+
+# field keywords used to identify areas for coloring
+
+LINE_NUM_COLOR = "yellow"
+HEADER_COLOR = "cyan"
+HEADER_PREFIX = ["ns/id/", "desc/id/"]
+
+SIG_COLOR = "red"
+SIG_START_KEYS = ["-----BEGIN RSA PUBLIC KEY-----", "-----BEGIN SIGNATURE-----"]
+SIG_END_KEYS = ["-----END RSA PUBLIC KEY-----", "-----END SIGNATURE-----"]
+
+UNRESOLVED_MSG = "No consensus data available"
+ERROR_MSG = "Unable to retrieve data"
+
+
+def show_descriptor_popup(conn_panel):
+  """
+  Presents consensus descriptor in popup window with the following controls:
+  Up, Down, Page Up, Page Down - scroll descriptor
+  Right, Left - next / previous connection
+  Enter, Space, d, D - close popup
+
+  Arguments:
+    conn_panel - connection panel providing the dialog
+  """
+
+  # hides the title of the connection panel
+
+  conn_panel.set_title_visible(False)
+  conn_panel.redraw(True)
+
+  control = arm.controller.get_controller()
+  panel.CURSES_LOCK.acquire()
+  is_done = False
+
+  try:
+    while not is_done:
+      selection = conn_panel.get_selection()
+
+      if not selection:
+        break
+
+      fingerprint = selection.foreign.get_fingerprint()
+
+      if fingerprint == "UNKNOWN":
+        fingerprint = None
+
+      display_text = get_display_text(fingerprint)
+      display_color = arm.connections.conn_entry.CATEGORY_COLOR[selection.get_type()]
+      show_line_number = fingerprint is not None
+
+      # determines the maximum popup size the display_text can fill
+
+      popup_height, popup_width = get_preferred_size(display_text, conn_panel.max_x, show_line_number)
+
+      popup, _, height = arm.popups.init(popup_height, popup_width)
+
+      if not popup:
+        break
+
+      scroll, is_changed = 0, True
+
+      try:
+        while not is_done:
+          if is_changed:
+            draw(popup, fingerprint, display_text, display_color, scroll, show_line_number)
+            is_changed = False
+
+          key = control.get_screen().getch()
+
+          if ui_tools.is_scroll_key(key):
+            # TODO: This is a bit buggy in that scrolling is by display_text
+            # lines rather than the displayed lines, causing issues when
+            # content wraps. The result is that we can't have a scrollbar and
+            # can't scroll to the bottom if there's a multi-line being
+            # displayed. However, trying to correct this introduces a big can
+            # of worms and after hours decided that this isn't worth the
+            # effort...
+
+            new_scroll = ui_tools.get_scroll_position(key, scroll, height - 2, len(display_text))
+
+            if scroll != new_scroll:
+              scroll, is_changed = new_scroll, True
+          elif ui_tools.is_selection_key(key) or key in (ord('d'), ord('D')):
+            is_done = True  # closes popup
+          elif key in (curses.KEY_LEFT, curses.KEY_RIGHT):
+            # navigation - pass on to conn_panel and recreate popup
+
+            conn_panel.handle_key(curses.KEY_UP if key == curses.KEY_LEFT else curses.KEY_DOWN)
+            break
+      finally:
+        arm.popups.finalize()
+  finally:
+    conn_panel.set_title_visible(True)
+    conn_panel.redraw(True)
+    panel.CURSES_LOCK.release()
+
+
+def get_display_text(fingerprint):
+  """
+  Provides the descriptor and consensus entry for a relay. This is a list of
+  lines to be displayed by the dialog.
+  """
+
+  if not fingerprint:
+    return [UNRESOLVED_MSG]
+
+  conn, description = tor_tools.get_conn(), []
+
+  description.append("ns/id/%s" % fingerprint)
+  consensus_entry = conn.get_consensus_entry(fingerprint)
+
+  if consensus_entry:
+    description += consensus_entry.split("\n")
+  else:
+    description += [ERROR_MSG, ""]
+
+  description.append("desc/id/%s" % fingerprint)
+  descriptor_entry = conn.get_descriptor_entry(fingerprint)
+
+  if descriptor_entry:
+    description += descriptor_entry.split("\n")
+  else:
+    description += [ERROR_MSG]
+
+  return description
+
+
+def get_preferred_size(text, max_width, show_line_number):
+  """
+  Provides the (height, width) tuple for the preferred size of the given text.
+  """
+
+  width, height = 0, len(text) + 2
+  line_number_width = int(math.log10(len(text))) + 1
+
+  for line in text:
+    # width includes content, line number field, and border
+
+    line_width = len(line) + 5
+
+    if show_line_number:
+      line_width += line_number_width
+
+    width = max(width, line_width)
+
+    # tracks number of extra lines that will be taken due to text wrap
+    height += (line_width - 2) / max_width
+
+  return (height, width)
+
+
+def draw(popup, fingerprint, display_text, display_color, scroll, show_line_number):
+  popup.win.erase()
+  popup.win.box()
+  x_offset = 2
+
+  if fingerprint:
+    title = "Consensus Descriptor (%s):" % fingerprint
+  else:
+    title = "Consensus Descriptor:"
+
+  popup.addstr(0, 0, title, curses.A_STANDOUT)
+
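+  # number of digits needed to show the largest line number label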
+  line_number_width = int(math.log10(len(display_text))) + 1
+  is_encryption_block = False   # flag indicating if we're currently displaying a key
+
+  # checks if first line is in an encryption block
+
+  for i in range(0, scroll):
+    line_text = display_text[i].strip()
+
+    if line_text in SIG_START_KEYS:
+      is_encryption_block = True
+    elif line_text in SIG_END_KEYS:
+      is_encryption_block = False
+
+  draw_line, page_height = 1, popup.max_y - 2
+
+  for i in range(scroll, scroll + page_height):
+    line_text = display_text[i].strip()
+    x_offset = 2
+
+    if show_line_number:
+      line_number_label = ("%%%ii" % line_number_width) % (i + 1)
+      line_number_format = curses.A_BOLD | ui_tools.get_color(LINE_NUM_COLOR)
+
+      popup.addstr(draw_line, x_offset, line_number_label, line_number_format)
+      x_offset += line_number_width + 1
+
+    # Most consensus and descriptor lines are keyword/value pairs. Both are
+    # shown with the same color, but the keyword is bolded.
+
+    keyword, value = line_text, ""
+    draw_format = ui_tools.get_color(display_color)
+
+    if line_text.startswith(HEADER_PREFIX[0]) or line_text.startswith(HEADER_PREFIX[1]):
+      keyword, value = line_text, ""
+      draw_format = ui_tools.get_color(HEADER_COLOR)
+    elif line_text == UNRESOLVED_MSG or line_text == ERROR_MSG:
+      keyword, value = line_text, ""
+    elif line_text in SIG_START_KEYS:
+      keyword, value = line_text, ""
+      is_encryption_block = True
+      draw_format = ui_tools.get_color(SIG_COLOR)
+    elif line_text in SIG_END_KEYS:
+      keyword, value = line_text, ""
+      is_encryption_block = False
+      draw_format = ui_tools.get_color(SIG_COLOR)
+    elif is_encryption_block:
+      keyword, value = "", line_text
+      draw_format = ui_tools.get_color(SIG_COLOR)
+    elif " " in line_text:
+      div_index = line_text.find(" ")
+      keyword, value = line_text[:div_index], line_text[div_index:]
+
+    display_queue = [(keyword, draw_format | curses.A_BOLD), (value, draw_format)]
+    cursor_location = x_offset
+
+    while display_queue:
+      msg, msg_format = display_queue.pop(0)
+
+      if not msg:
+        continue
+
+      max_msg_size = popup.max_x - 1 - cursor_location
+
+      if len(msg) >= max_msg_size:
+        # needs to split up the line
+
+        msg, remainder = ui_tools.crop_str(msg, max_msg_size, None, end_type = None, get_remainder = True)
+
+        if x_offset == cursor_location and msg == "":
+          # first word is longer than the line
+
+          msg = ui_tools.crop_str(remainder, max_msg_size)
+
+          if " " in remainder:
+            remainder = remainder.split(" ", 1)[1]
+          else:
+            remainder = ""
+
+        popup.addstr(draw_line, cursor_location, msg, msg_format)
+        cursor_location = x_offset
+
+        if remainder:
+          display_queue.insert(0, (remainder.strip(), msg_format))
+          draw_line += 1
+      else:
+        popup.addstr(draw_line, cursor_location, msg, msg_format)
+        cursor_location += len(msg)
+
+      if draw_line > page_height:
+        break
+
+    draw_line += 1
+
+    if draw_line > page_height:
+      break
+
+  popup.win.refresh()
diff --git a/arm/controller.py b/arm/controller.py
index 52a3c7c..98b5adb 100644
--- a/arm/controller.py
+++ b/arm/controller.py
@@ -11,20 +11,20 @@ import threading
 import arm.arguments
 import arm.menu.menu
 import arm.popups
-import arm.headerPanel
-import arm.logPanel
-import arm.configPanel
-import arm.torrcPanel
-import arm.graphing.graphPanel
-import arm.graphing.bandwidthStats
-import arm.graphing.connStats
-import arm.graphing.resourceStats
-import arm.connections.connPanel
+import arm.header_panel
+import arm.log_panel
+import arm.config_panel
+import arm.torrc_panel
+import arm.graphing.graph_panel
+import arm.graphing.bandwidth_stats
+import arm.graphing.conn_stats
+import arm.graphing.resource_stats
+import arm.connections.conn_panel
 import arm.util.tracker
 
 from stem.control import State
 
-from arm.util import panel, torConfig, torTools
+from arm.util import panel, tor_config, tor_tools
 
 from stem.util import conf, enum, log, system
 
@@ -102,7 +102,7 @@ def init_controller(stdscr, start_time):
   # initializes the panels
 
   sticky_panels = [
-    arm.headerPanel.HeaderPanel(stdscr, start_time),
+    arm.header_panel.HeaderPanel(stdscr, start_time),
     LabelPanel(stdscr),
   ]
 
@@ -110,24 +110,24 @@ def init_controller(stdscr, start_time):
 
   # first page: graph and log
   if CONFIG["features.panels.show.graph"]:
-    first_page_panels.append(arm.graphing.graphPanel.GraphPanel(stdscr))
+    first_page_panels.append(arm.graphing.graph_panel.GraphPanel(stdscr))
 
   if CONFIG["features.panels.show.log"]:
     expanded_events = arm.arguments.expand_events(CONFIG["startup.events"])
-    first_page_panels.append(arm.logPanel.LogPanel(stdscr, expanded_events))
+    first_page_panels.append(arm.log_panel.LogPanel(stdscr, expanded_events))
 
   if first_page_panels:
     page_panels.append(first_page_panels)
 
   # second page: connections
   if CONFIG["features.panels.show.connection"]:
-    page_panels.append([arm.connections.connPanel.ConnectionPanel(stdscr)])
+    page_panels.append([arm.connections.conn_panel.ConnectionPanel(stdscr)])
 
     # The DisableDebuggerAttachment will prevent our connection panel from really
     # functioning. It'll have circuits, but little else. If this is the case then
     # notify the user and tell them what they can do to fix it.
 
-    controller = torTools.get_conn().controller
+    controller = tor_tools.get_conn().controller
 
     if controller.get_conf("DisableDebuggerAttachment", None) == "1":
       log.notice("Tor is preventing system utilities like netstat and lsof from working. This means that arm can't provide you with connection information. You can change this by adding 'DisableDebuggerAttachment 0' to your torrc and restarting tor. For more information see...\nhttps://trac.torproject.org/3313")
@@ -159,12 +159,12 @@ def init_controller(stdscr, start_time):
   # third page: config
 
   if CONFIG["features.panels.show.config"]:
-    page_panels.append([arm.configPanel.ConfigPanel(stdscr, arm.configPanel.State.TOR)])
+    page_panels.append([arm.config_panel.ConfigPanel(stdscr, arm.config_panel.State.TOR)])
 
   # fourth page: torrc
 
   if CONFIG["features.panels.show.torrc"]:
-    page_panels.append([arm.torrcPanel.TorrcPanel(stdscr, arm.torrcPanel.Config.TORRC)])
+    page_panels.append([arm.torrc_panel.TorrcPanel(stdscr, arm.torrc_panel.Config.TORRC)])
 
   # initializes the controller
 
@@ -177,12 +177,12 @@ def init_controller(stdscr, start_time):
   if graph_panel:
     # statistical monitors for graph
 
-    bw_stats = arm.graphing.bandwidthStats.BandwidthStats()
+    bw_stats = arm.graphing.bandwidth_stats.BandwidthStats()
     graph_panel.add_stats(GraphStat.BANDWIDTH, bw_stats)
-    graph_panel.add_stats(GraphStat.SYSTEM_RESOURCES, arm.graphing.resourceStats.ResourceStats())
+    graph_panel.add_stats(GraphStat.SYSTEM_RESOURCES, arm.graphing.resource_stats.ResourceStats())
 
     if CONFIG["features.panels.show.connection"]:
-      graph_panel.add_stats(GraphStat.CONNECTIONS, arm.graphing.connStats.ConnStats())
+      graph_panel.add_stats(GraphStat.CONNECTIONS, arm.graphing.conn_stats.ConnStats())
 
     # sets graph based on config parameter
 
@@ -194,7 +194,7 @@ def init_controller(stdscr, start_time):
 
     # prepopulates bandwidth values from state file
 
-    if CONFIG["features.graph.bw.prepopulate"] and torTools.get_conn().is_alive():
+    if CONFIG["features.graph.bw.prepopulate"] and tor_tools.get_conn().is_alive():
       is_successful = bw_stats.prepopulate_from_state()
 
       if is_successful:
@@ -517,7 +517,7 @@ class Controller:
     # down the instance
 
     is_shutdown_flag_present = False
-    torrc_contents = torConfig.get_torrc().get_contents()
+    torrc_contents = tor_config.get_torrc().get_contents()
 
     if torrc_contents:
       for line in torrc_contents:
@@ -527,7 +527,7 @@ class Controller:
 
     if is_shutdown_flag_present:
       try:
-        torTools.get_conn().shutdown()
+        tor_tools.get_conn().shutdown()
       except IOError as exc:
         arm.popups.show_msg(str(exc), 3, curses.A_BOLD)
 
@@ -540,7 +540,7 @@ def heartbeat_check(is_unresponsive):
     is_unresponsive - flag for if we've indicated to be responsive or not
   """
 
-  conn = torTools.get_conn()
+  conn = tor_tools.get_conn()
   last_heartbeat = conn.controller.get_latest_heartbeat()
 
   if conn.is_alive():
@@ -572,7 +572,7 @@ def conn_reset_listener(controller, event_type, _):
       # new contents are loaded.
 
       if get_controller().get_panel("torrc") is None:
-        torConfig.get_torrc().load(True)
+        tor_config.get_torrc().load(True)
 
 
 def start_arm(stdscr):
@@ -670,7 +670,7 @@ def start_arm(stdscr):
 
       if confirmation_key in (ord('x'), ord('X')):
         try:
-          torTools.get_conn().reload()
+          tor_tools.get_conn().reload()
         except IOError as exc:
           log.error("Error detected when reloading tor: %s" % exc.strerror)
     elif key == ord('h') or key == ord('H'):
diff --git a/arm/graphing/__init__.py b/arm/graphing/__init__.py
index d93512d..aebef5d 100644
--- a/arm/graphing/__init__.py
+++ b/arm/graphing/__init__.py
@@ -2,4 +2,4 @@
 Graphing panel resources.
 """
 
-__all__ = ["graph_panel", "bandwidthStats", "connStats", "resourceStats"]
+__all__ = ["graph_panel", "bandwidth_stats", "conn_stats", "resource_stats"]
diff --git a/arm/graphing/bandwidthStats.py b/arm/graphing/bandwidthStats.py
deleted file mode 100644
index e42a902..0000000
--- a/arm/graphing/bandwidthStats.py
+++ /dev/null
@@ -1,508 +0,0 @@
-"""
-Tracks bandwidth usage of the tor process, expanding to include accounting
-stats if they're set.
-"""
-
-import time
-import curses
-
-import arm.controller
-
-from arm.graphing import graphPanel
-from arm.util import torTools, uiTools
-
-from stem.control import State
-from stem.util import conf, log, str_tools, system
-
-
-def conf_handler(key, value):
-  if key == "features.graph.bw.accounting.rate":
-    return max(1, value)
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.graph.bw.transferInBytes": False,
-  "features.graph.bw.accounting.show": True,
-  "features.graph.bw.accounting.rate": 10,
-  "features.graph.bw.accounting.isTimeLong": False,
-  "tor.chroot": "",
-}, conf_handler)
-
-DL_COLOR, UL_COLOR = "green", "cyan"
-
-# width at which panel abandons placing optional stats (avg and total) with
-# header in favor of replacing the x-axis label
-
-COLLAPSE_WIDTH = 135
-
-# valid keys for the accounting_info mapping
-
-ACCOUNTING_ARGS = ("status", "reset_time", "read", "written", "read_limit", "writtenLimit")
-
-PREPOPULATE_SUCCESS_MSG = "Read the last day of bandwidth history from the state file"
-PREPOPULATE_FAILURE_MSG = "Unable to prepopulate bandwidth information (%s)"
-
-
-class BandwidthStats(graphPanel.GraphStats):
-  """
-  Uses tor BW events to generate bandwidth usage graph.
-  """
-
-  def __init__(self, is_pause_buffer = False):
-    graphPanel.GraphStats.__init__(self)
-
-    # stats prepopulated from tor's state file
-
-    self.prepopulate_primary_total = 0
-    self.prepopulate_secondary_total = 0
-    self.prepopulate_ticks = 0
-
-    # accounting data (set by _update_accounting_info method)
-
-    self.accounting_last_updated = 0
-    self.accounting_info = dict([(arg, "") for arg in ACCOUNTING_ARGS])
-
-    # listens for tor reload (sighup) events which can reset the bandwidth
-    # rate/burst and if tor's using accounting
-
-    conn = torTools.get_conn()
-    self._title_stats, self.is_accounting = [], False
-
-    if not is_pause_buffer:
-      self.reset_listener(conn.get_controller(), State.INIT, None)  # initializes values
-
-    conn.add_status_listener(self.reset_listener)
-
-    # Initialized the bandwidth totals to the values reported by Tor. This
-    # uses a controller options introduced in ticket 2345:
-    # https://trac.torproject.org/projects/tor/ticket/2345
-    #
-    # further updates are still handled via BW events to avoid unnecessary
-    # GETINFO requests.
-
-    self.initial_primary_total = 0
-    self.initial_secondary_total = 0
-
-    read_total = conn.get_info("traffic/read", None)
-
-    if read_total and read_total.isdigit():
-      self.initial_primary_total = int(read_total) / 1024  # Bytes -> KB
-
-    write_total = conn.get_info("traffic/written", None)
-
-    if write_total and write_total.isdigit():
-      self.initial_secondary_total = int(write_total) / 1024  # Bytes -> KB
-
-  def clone(self, new_copy = None):
-    if not new_copy:
-      new_copy = BandwidthStats(True)
-
-    new_copy.accounting_last_updated = self.accounting_last_updated
-    new_copy.accounting_info = self.accounting_info
-
-    # attributes that would have been initialized from calling the reset_listener
-
-    new_copy.is_accounting = self.is_accounting
-    new_copy._title_stats = self._title_stats
-
-    return graphPanel.GraphStats.clone(self, new_copy)
-
-  def reset_listener(self, controller, event_type, _):
-    # updates title parameters and accounting status if they changed
-
-    self._title_stats = []     # force reset of title
-    self.new_desc_event(None)  # updates title params
-
-    if event_type in (State.INIT, State.RESET) and CONFIG["features.graph.bw.accounting.show"]:
-      is_accounting_enabled = controller.get_info('accounting/enabled', None) == '1'
-
-      if is_accounting_enabled != self.is_accounting:
-        self.is_accounting = is_accounting_enabled
-
-        # redraws the whole screen since our height changed
-
-        arm.controller.get_controller().redraw()
-
-    # redraws to reflect changes (this especially noticeable when we have
-    # accounting and shut down since it then gives notice of the shutdown)
-
-    if self._graph_panel and self.is_selected:
-      self._graph_panel.redraw(True)
-
-  def prepopulate_from_state(self):
-    """
-    Attempts to use tor's state file to prepopulate values for the 15 minute
-    interval via the BWHistoryReadValues/BWHistoryWriteValues values. This
-    returns True if successful and False otherwise.
-    """
-
-    # checks that this is a relay (if ORPort is unset, then skip)
-
-    conn = torTools.get_conn()
-    or_port = conn.get_option("ORPort", None)
-
-    if or_port == "0":
-      return
-
-    # gets the uptime (using the same parameters as the header panel to take
-    # advantage of caching)
-    # TODO: stem dropped system caching support so we'll need to think of
-    # something else
-
-    uptime = None
-    query_pid = conn.controller.get_pid(None)
-
-    if query_pid:
-      query_param = ["%cpu", "rss", "%mem", "etime"]
-      query_cmd = "ps -p %s -o %s" % (query_pid, ",".join(query_param))
-      ps_call = system.call(query_cmd, None)
-
-      if ps_call and len(ps_call) == 2:
-        stats = ps_call[1].strip().split()
-
-        if len(stats) == 4:
-          uptime = stats[3]
-
-    # checks if tor has been running for at least a day, the reason being that
-    # the state tracks a day's worth of data and this should only prepopulate
-    # results associated with this tor instance
-
-    if not uptime or not "-" in uptime:
-      msg = PREPOPULATE_FAILURE_MSG % "insufficient uptime"
-      log.notice(msg)
-      return False
-
-    # get the user's data directory (usually '~/.tor')
-
-    data_dir = conn.get_option("DataDirectory", None)
-
-    if not data_dir:
-      msg = PREPOPULATE_FAILURE_MSG % "data directory not found"
-      log.notice(msg)
-      return False
-
-    # attempt to open the state file
-
-    try:
-      state_file = open("%s%s/state" % (CONFIG['tor.chroot'], data_dir), "r")
-    except IOError:
-      msg = PREPOPULATE_FAILURE_MSG % "unable to read the state file"
-      log.notice(msg)
-      return False
-
-    # get the BWHistory entries (ordered oldest to newest) and number of
-    # intervals since last recorded
-
-    bw_read_entries, bw_write_entries = None, None
-    missing_read_entries, missing_write_entries = None, None
-
-    # converts from gmt to local with respect to DST
-
-    tz_offset = time.altzone if time.localtime()[8] else time.timezone
-
-    for line in state_file:
-      line = line.strip()
-
-      # According to the rep_hist_update_state() function the BWHistory*Ends
-      # correspond to the start of the following sampling period. Also, the
-      # most recent values of BWHistory*Values appear to be an incremental
-      # counter for the current sampling period. Hence, offsets are added to
-      # account for both.
-
-      if line.startswith("BWHistoryReadValues"):
-        bw_read_entries = line[20:].split(",")
-        bw_read_entries = [int(entry) / 1024.0 / 900 for entry in bw_read_entries]
-        bw_read_entries.pop()
-      elif line.startswith("BWHistoryWriteValues"):
-        bw_write_entries = line[21:].split(",")
-        bw_write_entries = [int(entry) / 1024.0 / 900 for entry in bw_write_entries]
-        bw_write_entries.pop()
-      elif line.startswith("BWHistoryReadEnds"):
-        last_read_time = time.mktime(time.strptime(line[18:], "%Y-%m-%d %H:%M:%S")) - tz_offset
-        last_read_time -= 900
-        missing_read_entries = int((time.time() - last_read_time) / 900)
-      elif line.startswith("BWHistoryWriteEnds"):
-        last_write_time = time.mktime(time.strptime(line[19:], "%Y-%m-%d %H:%M:%S")) - tz_offset
-        last_write_time -= 900
-        missing_write_entries = int((time.time() - last_write_time) / 900)
-
-    if not bw_read_entries or not bw_write_entries or not last_read_time or not last_write_time:
-      msg = PREPOPULATE_FAILURE_MSG % "bandwidth stats missing from state file"
-      log.notice(msg)
-      return False
-
-    # fills missing entries with the last value
-
-    bw_read_entries += [bw_read_entries[-1]] * missing_read_entries
-    bw_write_entries += [bw_write_entries[-1]] * missing_write_entries
-
-    # crops starting entries so they're the same size
-
-    entry_count = min(len(bw_read_entries), len(bw_write_entries), self.max_column)
-    bw_read_entries = bw_read_entries[len(bw_read_entries) - entry_count:]
-    bw_write_entries = bw_write_entries[len(bw_write_entries) - entry_count:]
-
-    # gets index for 15-minute interval
-
-    interval_index = 0
-
-    for index_entry in graphPanel.UPDATE_INTERVALS:
-      if index_entry[1] == 900:
-        break
-      else:
-        interval_index += 1
-
-    # fills the graphing parameters with state information
-
-    for i in range(entry_count):
-      read_value, write_value = bw_read_entries[i], bw_write_entries[i]
-
-      self.last_primary, self.last_secondary = read_value, write_value
-
-      self.prepopulate_primary_total += read_value * 900
-      self.prepopulate_secondary_total += write_value * 900
-      self.prepopulate_ticks += 900
-
-      self.primary_counts[interval_index].insert(0, read_value)
-      self.secondary_counts[interval_index].insert(0, write_value)
-
-    self.max_primary[interval_index] = max(self.primary_counts)
-    self.max_secondary[interval_index] = max(self.secondary_counts)
-
-    del self.primary_counts[interval_index][self.max_column + 1:]
-    del self.secondary_counts[interval_index][self.max_column + 1:]
-
-    msg = PREPOPULATE_SUCCESS_MSG
-    missing_sec = time.time() - min(last_read_time, last_write_time)
-
-    if missing_sec:
-      msg += " (%s is missing)" % str_tools.get_time_label(missing_sec, 0, True)
-
-    log.notice(msg)
-
-    return True
-
-  def bandwidth_event(self, event):
-    if self.is_accounting and self.is_next_tick_redraw():
-      if time.time() - self.accounting_last_updated >= CONFIG["features.graph.bw.accounting.rate"]:
-        self._update_accounting_info()
-
-    # scales units from B to KB for graphing
-
-    self._process_event(event.read / 1024.0, event.written / 1024.0)
-
-  def draw(self, panel, width, height):
-    # line of the graph's x-axis labeling
-
-    labeling_line = graphPanel.GraphStats.get_content_height(self) + panel.graph_height - 2
-
-    # if display is narrow, overwrites x-axis labels with avg / total stats
-
-    if width <= COLLAPSE_WIDTH:
-      # clears line
-
-      panel.addstr(labeling_line, 0, " " * width)
-      graph_column = min((width - 10) / 2, self.max_column)
-
-      primary_footer = "%s, %s" % (self._get_avg_label(True), self._get_total_label(True))
-      secondary_footer = "%s, %s" % (self._get_avg_label(False), self._get_total_label(False))
-
-      panel.addstr(labeling_line, 1, primary_footer, uiTools.get_color(self.get_color(True)))
-      panel.addstr(labeling_line, graph_column + 6, secondary_footer, uiTools.get_color(self.get_color(False)))
-
-    # provides accounting stats if enabled
-
-    if self.is_accounting:
-      if torTools.get_conn().is_alive():
-        status = self.accounting_info["status"]
-
-        hibernate_color = "green"
-
-        if status == "soft":
-          hibernate_color = "yellow"
-        elif status == "hard":
-          hibernate_color = "red"
-        elif status == "":
-          # failed to be queried
-          status, hibernate_color = "unknown", "red"
-
-        panel.addstr(labeling_line + 2, 0, "Accounting (", curses.A_BOLD)
-        panel.addstr(labeling_line + 2, 12, status, curses.A_BOLD | uiTools.get_color(hibernate_color))
-        panel.addstr(labeling_line + 2, 12 + len(status), ")", curses.A_BOLD)
-
-        reset_time = self.accounting_info["reset_time"]
-
-        if not reset_time:
-          reset_time = "unknown"
-
-        panel.addstr(labeling_line + 2, 35, "Time to reset: %s" % reset_time)
-
-        used, total = self.accounting_info["read"], self.accounting_info["read_limit"]
-
-        if used and total:
-          panel.addstr(labeling_line + 3, 2, "%s / %s" % (used, total), uiTools.get_color(self.get_color(True)))
-
-        used, total = self.accounting_info["written"], self.accounting_info["writtenLimit"]
-
-        if used and total:
-          panel.addstr(labeling_line + 3, 37, "%s / %s" % (used, total), uiTools.get_color(self.get_color(False)))
-      else:
-        panel.addstr(labeling_line + 2, 0, "Accounting:", curses.A_BOLD)
-        panel.addstr(labeling_line + 2, 12, "Connection Closed...")
-
-  def get_title(self, width):
-    stats = list(self._title_stats)
-
-    while True:
-      if not stats:
-        return "Bandwidth:"
-      else:
-        label = "Bandwidth (%s):" % ", ".join(stats)
-
-        if len(label) > width:
-          del stats[-1]
-        else:
-          return label
-
-  def get_header_label(self, width, is_primary):
-    graph_type = "Download" if is_primary else "Upload"
-    stats = [""]
-
-    # if wide then avg and total are part of the header, otherwise they're on
-    # the x-axis
-
-    if width * 2 > COLLAPSE_WIDTH:
-      stats = [""] * 3
-      stats[1] = "- %s" % self._get_avg_label(is_primary)
-      stats[2] = ", %s" % self._get_total_label(is_primary)
-
-    stats[0] = "%-14s" % ("%s/sec" % str_tools.get_size_label((self.last_primary if is_primary else self.last_secondary) * 1024, 1, False, CONFIG["features.graph.bw.transferInBytes"]))
-
-    # drops label's components if there's not enough space
-
-    labeling = graph_type + " (" + "".join(stats).strip() + "):"
-
-    while len(labeling) >= width:
-      if len(stats) > 1:
-        del stats[-1]
-        labeling = graph_type + " (" + "".join(stats).strip() + "):"
-      else:
-        labeling = graph_type + ":"
-        break
-
-    return labeling
-
-  def get_color(self, is_primary):
-    return DL_COLOR if is_primary else UL_COLOR
-
-  def get_content_height(self):
-    base_height = graphPanel.GraphStats.get_content_height(self)
-    return base_height + 3 if self.is_accounting else base_height
-
-  def new_desc_event(self, event):
-    # updates self._title_stats with updated values
-
-    conn = torTools.get_conn()
-
-    if not conn.is_alive():
-      return  # keep old values
-
-    my_fingerprint = conn.get_info("fingerprint", None)
-
-    if not self._title_stats or not my_fingerprint or (event and my_fingerprint in event.idlist):
-      stats = []
-      bw_rate = conn.get_my_bandwidth_rate()
-      bw_burst = conn.get_my_bandwidth_burst()
-      bw_observed = conn.get_my_bandwidth_observed()
-      bw_measured = conn.get_my_bandwidth_measured()
-      label_in_bytes = CONFIG["features.graph.bw.transferInBytes"]
-
-      if bw_rate and bw_burst:
-        bw_rate_label = str_tools.get_size_label(bw_rate, 1, False, label_in_bytes)
-        bw_burst_label = str_tools.get_size_label(bw_burst, 1, False, label_in_bytes)
-
-        # if both are using rounded values then strip off the ".0" decimal
-
-        if ".0" in bw_rate_label and ".0" in bw_burst_label:
-          bw_rate_label = bw_rate_label.replace(".0", "")
-          bw_burst_label = bw_burst_label.replace(".0", "")
-
-        stats.append("limit: %s/s" % bw_rate_label)
-        stats.append("burst: %s/s" % bw_burst_label)
-
-      # Provide the observed bandwidth either if the measured bandwidth isn't
-      # available or if the measured bandwidth is the observed (this happens
-      # if there aren't yet enough bandwidth measurements).
-
-      if bw_observed and (not bw_measured or bw_measured == bw_observed):
-        stats.append("observed: %s/s" % str_tools.get_size_label(bw_observed, 1, False, label_in_bytes))
-      elif bw_measured:
-        stats.append("measured: %s/s" % str_tools.get_size_label(bw_measured, 1, False, label_in_bytes))
-
-      self._title_stats = stats
-
-  def _get_avg_label(self, is_primary):
-    total = self.primary_total if is_primary else self.secondary_total
-    total += self.prepopulate_primary_total if is_primary else self.prepopulate_secondary_total
-
-    return "avg: %s/sec" % str_tools.get_size_label((total / max(1, self.tick + self.prepopulate_ticks)) * 1024, 1, False, CONFIG["features.graph.bw.transferInBytes"])
-
-  def _get_total_label(self, is_primary):
-    total = self.primary_total if is_primary else self.secondary_total
-    total += self.initial_primary_total if is_primary else self.initial_secondary_total
-    return "total: %s" % str_tools.get_size_label(total * 1024, 1)
-
-  def _update_accounting_info(self):
-    """
-    Updates mapping used for accounting info. This includes the following keys:
-    status, reset_time, read, written, read_limit, writtenLimit
-
-    Any failed lookups result in a mapping to an empty string.
-    """
-
-    conn = torTools.get_conn()
-    queried = dict([(arg, "") for arg in ACCOUNTING_ARGS])
-    queried["status"] = conn.get_info("accounting/hibernating", None)
-
-    # provides a nicely formatted reset time
-
-    end_interval = conn.get_info("accounting/interval-end", None)
-
-    if end_interval:
-      # converts from gmt to local with respect to DST
-
-      if time.localtime()[8]:
-        tz_offset = time.altzone
-      else:
-        tz_offset = time.timezone
-
-      sec = time.mktime(time.strptime(end_interval, "%Y-%m-%d %H:%M:%S")) - time.time() - tz_offset
-
-      if CONFIG["features.graph.bw.accounting.isTimeLong"]:
-        queried["reset_time"] = ", ".join(str_tools.get_time_labels(sec, True))
-      else:
-        days = sec / 86400
-        sec %= 86400
-        hours = sec / 3600
-        sec %= 3600
-        minutes = sec / 60
-        sec %= 60
-        queried["reset_time"] = "%i:%02i:%02i:%02i" % (days, hours, minutes, sec)
-
-    # number of bytes used and in total for the accounting period
-
-    used = conn.get_info("accounting/bytes", None)
-    left = conn.get_info("accounting/bytes-left", None)
-
-    if used and left:
-      used_comp, left_comp = used.split(" "), left.split(" ")
-      read, written = int(used_comp[0]), int(used_comp[1])
-      read_left, written_left = int(left_comp[0]), int(left_comp[1])
-
-      queried["read"] = str_tools.get_size_label(read)
-      queried["written"] = str_tools.get_size_label(written)
-      queried["read_limit"] = str_tools.get_size_label(read + read_left)
-      queried["writtenLimit"] = str_tools.get_size_label(written + written_left)
-
-    self.accounting_info = queried
-    self.accounting_last_updated = time.time()
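The short-form reset countdown computed above is just repeated division of the
remaining seconds; a minimal sketch (the helper name is illustrative, not
something arm defines):

  def format_reset_time(sec):
    # days:hours:minutes:seconds, matching the non-isTimeLong branch
    days, sec = divmod(sec, 86400)
    hours, sec = divmod(sec, 3600)
    minutes, sec = divmod(sec, 60)
    return "%i:%02i:%02i:%02i" % (days, hours, minutes, sec)

  format_reset_time(93784)  # '1:02:03:04'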
diff --git a/arm/graphing/bandwidth_stats.py b/arm/graphing/bandwidth_stats.py
new file mode 100644
index 0000000..9cba96f
--- /dev/null
+++ b/arm/graphing/bandwidth_stats.py
@@ -0,0 +1,508 @@
+"""
+Tracks bandwidth usage of the tor process, expanding to include accounting
+stats if they're set.
+"""
+
+import time
+import curses
+
+import arm.controller
+
+from arm.graphing import graph_panel
+from arm.util import tor_tools, ui_tools
+
+from stem.control import State
+from stem.util import conf, log, str_tools, system
+
+
+def conf_handler(key, value):
+  if key == "features.graph.bw.accounting.rate":
+    return max(1, value)
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.graph.bw.transferInBytes": False,
+  "features.graph.bw.accounting.show": True,
+  "features.graph.bw.accounting.rate": 10,
+  "features.graph.bw.accounting.isTimeLong": False,
+  "tor.chroot": "",
+}, conf_handler)
+
+DL_COLOR, UL_COLOR = "green", "cyan"
+
+# width at which panel abandons placing optional stats (avg and total) with
+# header in favor of replacing the x-axis label
+
+COLLAPSE_WIDTH = 135
+
+# valid keys for the accounting_info mapping
+
+ACCOUNTING_ARGS = ("status", "reset_time", "read", "written", "read_limit", "writtenLimit")
+
+PREPOPULATE_SUCCESS_MSG = "Read the last day of bandwidth history from the state file"
+PREPOPULATE_FAILURE_MSG = "Unable to prepopulate bandwidth information (%s)"
+
+
+class BandwidthStats(graph_panel.GraphStats):
+  """
+  Uses tor BW events to generate bandwidth usage graph.
+  """
+
+  def __init__(self, is_pause_buffer = False):
+    graph_panel.GraphStats.__init__(self)
+
+    # stats prepopulated from tor's state file
+
+    self.prepopulate_primary_total = 0
+    self.prepopulate_secondary_total = 0
+    self.prepopulate_ticks = 0
+
+    # accounting data (set by _update_accounting_info method)
+
+    self.accounting_last_updated = 0
+    self.accounting_info = dict([(arg, "") for arg in ACCOUNTING_ARGS])
+
+    # listens for tor reload (sighup) events which can reset the bandwidth
+    # rate/burst and if tor's using accounting
+
+    conn = tor_tools.get_conn()
+    self._title_stats, self.is_accounting = [], False
+
+    if not is_pause_buffer:
+      self.reset_listener(conn.get_controller(), State.INIT, None)  # initializes values
+
+    conn.add_status_listener(self.reset_listener)
+
+    # Initializes the bandwidth totals to the values reported by Tor. This
+    # uses controller options introduced in ticket 2345:
+    # https://trac.torproject.org/projects/tor/ticket/2345
+    #
+    # Further updates are still handled via BW events to avoid unnecessary
+    # GETINFO requests.
+
+    self.initial_primary_total = 0
+    self.initial_secondary_total = 0
+
+    read_total = conn.get_info("traffic/read", None)
+
+    if read_total and read_total.isdigit():
+      self.initial_primary_total = int(read_total) / 1024  # Bytes -> KB
+
+    write_total = conn.get_info("traffic/written", None)
+
+    if write_total and write_total.isdigit():
+      self.initial_secondary_total = int(write_total) / 1024  # Bytes -> KB
+
+  def clone(self, new_copy = None):
+    if not new_copy:
+      new_copy = BandwidthStats(True)
+
+    new_copy.accounting_last_updated = self.accounting_last_updated
+    new_copy.accounting_info = self.accounting_info
+
+    # attributes that would have been initialized from calling the reset_listener
+
+    new_copy.is_accounting = self.is_accounting
+    new_copy._title_stats = self._title_stats
+
+    return graph_panel.GraphStats.clone(self, new_copy)
+
+  def reset_listener(self, controller, event_type, _):
+    # updates title parameters and accounting status if they changed
+
+    self._title_stats = []     # force reset of title
+    self.new_desc_event(None)  # updates title params
+
+    if event_type in (State.INIT, State.RESET) and CONFIG["features.graph.bw.accounting.show"]:
+      is_accounting_enabled = controller.get_info('accounting/enabled', None) == '1'
+
+      if is_accounting_enabled != self.is_accounting:
+        self.is_accounting = is_accounting_enabled
+
+        # redraws the whole screen since our height changed
+
+        arm.controller.get_controller().redraw()
+
+    # redraws to reflect changes (this is especially noticeable when we have
+    # accounting and shut down, since it then gives notice of the shutdown)
+
+    if self._graph_panel and self.is_selected:
+      self._graph_panel.redraw(True)
+
+  def prepopulate_from_state(self):
+    """
+    Attempts to use tor's state file to prepopulate values for the 15 minute
+    interval via the BWHistoryReadValues/BWHistoryWriteValues values. This
+    returns True if successful and False otherwise.
+    """
+
+    # checks that this is a relay (if ORPort is unset, then skip)
+
+    conn = tor_tools.get_conn()
+    or_port = conn.get_option("ORPort", None)
+
+    if or_port == "0":
+      return
+
+    # gets the uptime (using the same parameters as the header panel to take
+    # advantage of caching)
+    # TODO: stem dropped system caching support so we'll need to think of
+    # something else
+
+    uptime = None
+    query_pid = conn.controller.get_pid(None)
+
+    if query_pid:
+      query_param = ["%cpu", "rss", "%mem", "etime"]
+      query_cmd = "ps -p %s -o %s" % (query_pid, ",".join(query_param))
+      ps_call = system.call(query_cmd, None)
+
+      if ps_call and len(ps_call) == 2:
+        stats = ps_call[1].strip().split()
+
+        if len(stats) == 4:
+          uptime = stats[3]
+
+    # checks if tor has been running for at least a day, the reason being that
+    # the state tracks a day's worth of data and this should only prepopulate
+    # results associated with this tor instance
+
+    if not uptime or not "-" in uptime:
+      msg = PREPOPULATE_FAILURE_MSG % "insufficient uptime"
+      log.notice(msg)
+      return False
+
+    # get the user's data directory (usually '~/.tor')
+
+    data_dir = conn.get_option("DataDirectory", None)
+
+    if not data_dir:
+      msg = PREPOPULATE_FAILURE_MSG % "data directory not found"
+      log.notice(msg)
+      return False
+
+    # attempt to open the state file
+
+    try:
+      state_file = open("%s%s/state" % (CONFIG['tor.chroot'], data_dir), "r")
+    except IOError:
+      msg = PREPOPULATE_FAILURE_MSG % "unable to read the state file"
+      log.notice(msg)
+      return False
+
+    # get the BWHistory entries (ordered oldest to newest) and number of
+    # intervals since last recorded
+
+    bw_read_entries, bw_write_entries = None, None
+    missing_read_entries, missing_write_entries = None, None
+
+    # converts from gmt to local with respect to DST
+
+    tz_offset = time.altzone if time.localtime()[8] else time.timezone
+
+    for line in state_file:
+      line = line.strip()
+
+      # According to the rep_hist_update_state() function the BWHistory*Ends
+      # correspond to the start of the following sampling period. Also, the
+      # most recent values of BWHistory*Values appear to be an incremental
+      # counter for the current sampling period. Hence, offsets are added to
+      # account for both.
+
+      if line.startswith("BWHistoryReadValues"):
+        bw_read_entries = line[20:].split(",")
+        bw_read_entries = [int(entry) / 1024.0 / 900 for entry in bw_read_entries]
+        bw_read_entries.pop()
+      elif line.startswith("BWHistoryWriteValues"):
+        bw_write_entries = line[21:].split(",")
+        bw_write_entries = [int(entry) / 1024.0 / 900 for entry in bw_write_entries]
+        bw_write_entries.pop()
+      elif line.startswith("BWHistoryReadEnds"):
+        last_read_time = time.mktime(time.strptime(line[18:], "%Y-%m-%d %H:%M:%S")) - tz_offset
+        last_read_time -= 900
+        missing_read_entries = int((time.time() - last_read_time) / 900)
+      elif line.startswith("BWHistoryWriteEnds"):
+        last_write_time = time.mktime(time.strptime(line[19:], "%Y-%m-%d %H:%M:%S")) - tz_offset
+        last_write_time -= 900
+        missing_write_entries = int((time.time() - last_write_time) / 900)
+
+    if not bw_read_entries or not bw_write_entries or not last_read_time or not last_write_time:
+      msg = PREPOPULATE_FAILURE_MSG % "bandwidth stats missing from state file"
+      log.notice(msg)
+      return False
+
+    # fills missing entries with the last value
+
+    bw_read_entries += [bw_read_entries[-1]] * missing_read_entries
+    bw_write_entries += [bw_write_entries[-1]] * missing_write_entries
+
+    # crops starting entries so they're the same size
+
+    entry_count = min(len(bw_read_entries), len(bw_write_entries), self.max_column)
+    bw_read_entries = bw_read_entries[len(bw_read_entries) - entry_count:]
+    bw_write_entries = bw_write_entries[len(bw_write_entries) - entry_count:]
+
+    # gets index for 15-minute interval
+
+    interval_index = 0
+
+    for index_entry in graph_panel.UPDATE_INTERVALS:
+      if index_entry[1] == 900:
+        break
+      else:
+        interval_index += 1
+
+    # fills the graphing parameters with state information
+
+    for i in range(entry_count):
+      read_value, write_value = bw_read_entries[i], bw_write_entries[i]
+
+      self.last_primary, self.last_secondary = read_value, write_value
+
+      self.prepopulate_primary_total += read_value * 900
+      self.prepopulate_secondary_total += write_value * 900
+      self.prepopulate_ticks += 900
+
+      self.primary_counts[interval_index].insert(0, read_value)
+      self.secondary_counts[interval_index].insert(0, write_value)
+
+    self.max_primary[interval_index] = max(self.primary_counts)
+    self.max_secondary[interval_index] = max(self.secondary_counts)
+
+    del self.primary_counts[interval_index][self.max_column + 1:]
+    del self.secondary_counts[interval_index][self.max_column + 1:]
+
+    msg = PREPOPULATE_SUCCESS_MSG
+    missing_sec = time.time() - min(last_read_time, last_write_time)
+
+    if missing_sec:
+      msg += " (%s is missing)" % str_tools.get_time_label(missing_sec, 0, True)
+
+    log.notice(msg)
+
+    return True
+
+  def bandwidth_event(self, event):
+    if self.is_accounting and self.is_next_tick_redraw():
+      if time.time() - self.accounting_last_updated >= CONFIG["features.graph.bw.accounting.rate"]:
+        self._update_accounting_info()
+
+    # scales units from B to KB for graphing
+
+    self._process_event(event.read / 1024.0, event.written / 1024.0)
+
+  def draw(self, panel, width, height):
+    # line of the graph's x-axis labeling
+
+    labeling_line = graph_panel.GraphStats.get_content_height(self) + panel.graph_height - 2
+
+    # if display is narrow, overwrites x-axis labels with avg / total stats
+
+    if width <= COLLAPSE_WIDTH:
+      # clears line
+
+      panel.addstr(labeling_line, 0, " " * width)
+      graph_column = min((width - 10) / 2, self.max_column)
+
+      primary_footer = "%s, %s" % (self._get_avg_label(True), self._get_total_label(True))
+      secondary_footer = "%s, %s" % (self._get_avg_label(False), self._get_total_label(False))
+
+      panel.addstr(labeling_line, 1, primary_footer, ui_tools.get_color(self.get_color(True)))
+      panel.addstr(labeling_line, graph_column + 6, secondary_footer, ui_tools.get_color(self.get_color(False)))
+
+    # provides accounting stats if enabled
+
+    if self.is_accounting:
+      if tor_tools.get_conn().is_alive():
+        status = self.accounting_info["status"]
+
+        hibernate_color = "green"
+
+        if status == "soft":
+          hibernate_color = "yellow"
+        elif status == "hard":
+          hibernate_color = "red"
+        elif status == "":
+          # failed to be queried
+          status, hibernate_color = "unknown", "red"
+
+        panel.addstr(labeling_line + 2, 0, "Accounting (", curses.A_BOLD)
+        panel.addstr(labeling_line + 2, 12, status, curses.A_BOLD | ui_tools.get_color(hibernate_color))
+        panel.addstr(labeling_line + 2, 12 + len(status), ")", curses.A_BOLD)
+
+        reset_time = self.accounting_info["reset_time"]
+
+        if not reset_time:
+          reset_time = "unknown"
+
+        panel.addstr(labeling_line + 2, 35, "Time to reset: %s" % reset_time)
+
+        used, total = self.accounting_info["read"], self.accounting_info["read_limit"]
+
+        if used and total:
+          panel.addstr(labeling_line + 3, 2, "%s / %s" % (used, total), ui_tools.get_color(self.get_color(True)))
+
+        used, total = self.accounting_info["written"], self.accounting_info["writtenLimit"]
+
+        if used and total:
+          panel.addstr(labeling_line + 3, 37, "%s / %s" % (used, total), ui_tools.get_color(self.get_color(False)))
+      else:
+        panel.addstr(labeling_line + 2, 0, "Accounting:", curses.A_BOLD)
+        panel.addstr(labeling_line + 2, 12, "Connection Closed...")
+
+  def get_title(self, width):
+    stats = list(self._title_stats)
+
+    while True:
+      if not stats:
+        return "Bandwidth:"
+      else:
+        label = "Bandwidth (%s):" % ", ".join(stats)
+
+        if len(label) > width:
+          del stats[-1]
+        else:
+          return label
+
+  def get_header_label(self, width, is_primary):
+    graph_type = "Download" if is_primary else "Upload"
+    stats = [""]
+
+    # if wide then avg and total are part of the header, otherwise they're on
+    # the x-axis
+
+    if width * 2 > COLLAPSE_WIDTH:
+      stats = [""] * 3
+      stats[1] = "- %s" % self._get_avg_label(is_primary)
+      stats[2] = ", %s" % self._get_total_label(is_primary)
+
+    stats[0] = "%-14s" % ("%s/sec" % str_tools.get_size_label((self.last_primary if is_primary else self.last_secondary) * 1024, 1, False, CONFIG["features.graph.bw.transferInBytes"]))
+
+    # drops label's components if there's not enough space
+
+    labeling = graph_type + " (" + "".join(stats).strip() + "):"
+
+    while len(labeling) >= width:
+      if len(stats) > 1:
+        del stats[-1]
+        labeling = graph_type + " (" + "".join(stats).strip() + "):"
+      else:
+        labeling = graph_type + ":"
+        break
+
+    return labeling
+
+  def get_color(self, is_primary):
+    return DL_COLOR if is_primary else UL_COLOR
+
+  def get_content_height(self):
+    base_height = graph_panel.GraphStats.get_content_height(self)
+    return base_height + 3 if self.is_accounting else base_height
+
+  def new_desc_event(self, event):
+    # updates self._title_stats with updated values
+
+    conn = tor_tools.get_conn()
+
+    if not conn.is_alive():
+      return  # keep old values
+
+    my_fingerprint = conn.get_info("fingerprint", None)
+
+    if not self._title_stats or not my_fingerprint or (event and my_fingerprint in event.idlist):
+      stats = []
+      bw_rate = conn.get_my_bandwidth_rate()
+      bw_burst = conn.get_my_bandwidth_burst()
+      bw_observed = conn.get_my_bandwidth_observed()
+      bw_measured = conn.get_my_bandwidth_measured()
+      label_in_bytes = CONFIG["features.graph.bw.transferInBytes"]
+
+      if bw_rate and bw_burst:
+        bw_rate_label = str_tools.get_size_label(bw_rate, 1, False, label_in_bytes)
+        bw_burst_label = str_tools.get_size_label(bw_burst, 1, False, label_in_bytes)
+
+        # if both are using rounded values then strip off the ".0" decimal
+
+        if ".0" in bw_rate_label and ".0" in bw_burst_label:
+          bw_rate_label = bw_rate_label.replace(".0", "")
+          bw_burst_label = bw_burst_label.replace(".0", "")
+
+        stats.append("limit: %s/s" % bw_rate_label)
+        stats.append("burst: %s/s" % bw_burst_label)
+
+      # Provide the observed bandwidth either if the measured bandwidth isn't
+      # available or if the measured bandwidth is the observed (this happens
+      # if there aren't yet enough bandwidth measurements).
+
+      if bw_observed and (not bw_measured or bw_measured == bw_observed):
+        stats.append("observed: %s/s" % str_tools.get_size_label(bw_observed, 1, False, label_in_bytes))
+      elif bw_measured:
+        stats.append("measured: %s/s" % str_tools.get_size_label(bw_measured, 1, False, label_in_bytes))
+
+      self._title_stats = stats
+
+  def _get_avg_label(self, is_primary):
+    total = self.primary_total if is_primary else self.secondary_total
+    total += self.prepopulate_primary_total if is_primary else self.prepopulate_secondary_total
+
+    return "avg: %s/sec" % str_tools.get_size_label((total / max(1, self.tick + self.prepopulate_ticks)) * 1024, 1, False, CONFIG["features.graph.bw.transferInBytes"])
+
+  def _get_total_label(self, is_primary):
+    total = self.primary_total if is_primary else self.secondary_total
+    total += self.initial_primary_total if is_primary else self.initial_secondary_total
+    return "total: %s" % str_tools.get_size_label(total * 1024, 1)
+
+  def _update_accounting_info(self):
+    """
+    Updates mapping used for accounting info. This includes the following keys:
+    status, reset_time, read, written, read_limit, writtenLimit
+
+    Any failed lookups result in a mapping to an empty string.
+    """
+
+    conn = tor_tools.get_conn()
+    queried = dict([(arg, "") for arg in ACCOUNTING_ARGS])
+    queried["status"] = conn.get_info("accounting/hibernating", None)
+
+    # provides a nicely formatted reset time
+
+    end_interval = conn.get_info("accounting/interval-end", None)
+
+    if end_interval:
+      # converts from gmt to local with respect to DST
+
+      if time.localtime()[8]:
+        tz_offset = time.altzone
+      else:
+        tz_offset = time.timezone
+
+      sec = time.mktime(time.strptime(end_interval, "%Y-%m-%d %H:%M:%S")) - time.time() - tz_offset
+
+      if CONFIG["features.graph.bw.accounting.isTimeLong"]:
+        queried["reset_time"] = ", ".join(str_tools.get_time_labels(sec, True))
+      else:
+        days = sec / 86400
+        sec %= 86400
+        hours = sec / 3600
+        sec %= 3600
+        minutes = sec / 60
+        sec %= 60
+        queried["reset_time"] = "%i:%02i:%02i:%02i" % (days, hours, minutes, sec)
+
+    # number of bytes used and in total for the accounting period
+
+    used = conn.get_info("accounting/bytes", None)
+    left = conn.get_info("accounting/bytes-left", None)
+
+    if used and left:
+      used_comp, left_comp = used.split(" "), left.split(" ")
+      read, written = int(used_comp[0]), int(used_comp[1])
+      read_left, written_left = int(left_comp[0]), int(left_comp[1])
+
+      queried["read"] = str_tools.get_size_label(read)
+      queried["written"] = str_tools.get_size_label(written)
+      queried["read_limit"] = str_tools.get_size_label(read + read_left)
+      queried["writtenLimit"] = str_tools.get_size_label(written + written_left)
+
+    self.accounting_info = queried
+    self.accounting_last_updated = time.time()
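The BWHistory parsing in prepopulate_from_state() reduces to splitting the
comma-separated byte counts and rescaling them to KB/s over tor's 900 second
sampling period; a stripped-down sketch of the read side, where
read_bw_history and state_path are placeholders rather than arm names (the
write side is symmetric):

  def read_bw_history(state_path):
    samples = []

    with open(state_path) as state_file:
      for line in state_file:
        line = line.strip()

        if line.startswith("BWHistoryReadValues"):
          # strip the "BWHistoryReadValues " prefix, then convert each
          # bytes-per-900-second interval into a KB/s sample
          samples = [int(entry) / 1024.0 / 900 for entry in line[20:].split(",")]
          samples.pop()  # last entry is the still-accumulating current period

    return samples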
diff --git a/arm/graphing/connStats.py b/arm/graphing/connStats.py
deleted file mode 100644
index 068fd0a..0000000
--- a/arm/graphing/connStats.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""
-Tracks stats concerning tor's current connections.
-"""
-
-import arm.util.tracker
-
-from arm.graphing import graphPanel
-from arm.util import torTools
-
-from stem.control import State
-
-
-class ConnStats(graphPanel.GraphStats):
-  """
-  Tracks number of connections, counting client and directory connections as
-  outbound. Control connections are excluded from counts.
-  """
-
-  def __init__(self):
-    graphPanel.GraphStats.__init__(self)
-
-    # listens for tor reload (sighup) events which can reset the ports tor uses
-
-    conn = torTools.get_conn()
-    self.or_port, self.dir_port, self.control_port = "0", "0", "0"
-    self.reset_listener(conn.get_controller(), State.INIT, None)  # initialize port values
-    conn.add_status_listener(self.reset_listener)
-
-  def clone(self, new_copy=None):
-    if not new_copy:
-      new_copy = ConnStats()
-
-    return graphPanel.GraphStats.clone(self, new_copy)
-
-  def reset_listener(self, controller, event_type, _):
-    if event_type in (State.INIT, State.RESET):
-      self.or_port = controller.get_conf("ORPort", "0")
-      self.dir_port = controller.get_conf("DirPort", "0")
-      self.control_port = controller.get_conf("ControlPort", "0")
-
-  def event_tick(self):
-    """
-    Fetches connection stats from cached information.
-    """
-
-    inbound_count, outbound_count = 0, 0
-
-    for entry in arm.util.tracker.get_connection_tracker().get_connections():
-      local_port = entry.local_port
-
-      if local_port in (self.or_port, self.dir_port):
-        inbound_count += 1
-      elif local_port == self.control_port:
-        pass  # control connection
-      else:
-        outbound_count += 1
-
-    self._process_event(inbound_count, outbound_count)
-
-  def get_title(self, width):
-    return "Connection Count:"
-
-  def get_header_label(self, width, is_primary):
-    avg = (self.primary_total if is_primary else self.secondary_total) / max(1, self.tick)
-
-    if is_primary:
-      return "Inbound (%s, avg: %s):" % (self.last_primary, avg)
-    else:
-      return "Outbound (%s, avg: %s):" % (self.last_secondary, avg)
-
-  def get_refresh_rate(self):
-    return 5
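Walking event_tick()'s port comparison with made-up values shows how the
counts split (the ports and connections below are hypothetical):

  or_port, dir_port, control_port = "9001", "9030", "9051"
  local_ports = ["9001", "9051", "45120", "9030"]

  inbound = sum(1 for port in local_ports if port in (or_port, dir_port))
  outbound = sum(1 for port in local_ports if port not in (or_port, dir_port, control_port))

  # inbound == 2, outbound == 1; the control connection isn't counted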
diff --git a/arm/graphing/conn_stats.py b/arm/graphing/conn_stats.py
new file mode 100644
index 0000000..6694647
--- /dev/null
+++ b/arm/graphing/conn_stats.py
@@ -0,0 +1,72 @@
+"""
+Tracks stats concerning tor's current connections.
+"""
+
+import arm.util.tracker
+
+from arm.graphing import graph_panel
+from arm.util import tor_tools
+
+from stem.control import State
+
+
+class ConnStats(graph_panel.GraphStats):
+  """
+  Tracks number of connections, counting client and directory connections as
+  outbound. Control connections are excluded from counts.
+  """
+
+  def __init__(self):
+    graph_panel.GraphStats.__init__(self)
+
+    # listens for tor reload (sighup) events which can reset the ports tor uses
+
+    conn = tor_tools.get_conn()
+    self.or_port, self.dir_port, self.control_port = "0", "0", "0"
+    self.reset_listener(conn.get_controller(), State.INIT, None)  # initialize port values
+    conn.add_status_listener(self.reset_listener)
+
+  def clone(self, new_copy=None):
+    if not new_copy:
+      new_copy = ConnStats()
+
+    return graph_panel.GraphStats.clone(self, new_copy)
+
+  def reset_listener(self, controller, event_type, _):
+    if event_type in (State.INIT, State.RESET):
+      self.or_port = controller.get_conf("ORPort", "0")
+      self.dir_port = controller.get_conf("DirPort", "0")
+      self.control_port = controller.get_conf("ControlPort", "0")
+
+  def event_tick(self):
+    """
+    Fetches connection stats from cached information.
+    """
+
+    inbound_count, outbound_count = 0, 0
+
+    for entry in arm.util.tracker.get_connection_tracker().get_connections():
+      local_port = entry.local_port
+
+      if local_port in (self.or_port, self.dir_port):
+        inbound_count += 1
+      elif local_port == self.control_port:
+        pass  # control connection
+      else:
+        outbound_count += 1
+
+    self._process_event(inbound_count, outbound_count)
+
+  def get_title(self, width):
+    return "Connection Count:"
+
+  def get_header_label(self, width, is_primary):
+    avg = (self.primary_total if is_primary else self.secondary_total) / max(1, self.tick)
+
+    if is_primary:
+      return "Inbound (%s, avg: %s):" % (self.last_primary, avg)
+    else:
+      return "Outbound (%s, avg: %s):" % (self.last_secondary, avg)
+
+  def get_refresh_rate(self):
+    return 5
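These counts are refreshed on tor's once-per-second BW events rather than on a
timer: GraphStats (in graphPanel.py / graph_panel.py below) registers
bandwidth_event() as a stem listener and calls event_tick() from it. Stripped
of arm's tor_tools wrapper, that subscription amounts to roughly the following
sketch, which assumes a local tor with its default control port and that
authentication succeeds (print_bandwidth is an illustrative name):

  import time

  import stem.control

  with stem.control.Controller.from_port() as controller:
    controller.authenticate()

    def print_bandwidth(event):
      # same B -> KB conversion BandwidthStats applies before graphing
      print("read: %0.1f KB/s, written: %0.1f KB/s" % (event.read / 1024.0, event.written / 1024.0))

    controller.add_event_listener(print_bandwidth, stem.control.EventType.BW)
    time.sleep(10)  # BW events arrive once a second while we wait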
diff --git a/arm/graphing/graphPanel.py b/arm/graphing/graphPanel.py
deleted file mode 100644
index aeb4cff..0000000
--- a/arm/graphing/graphPanel.py
+++ /dev/null
@@ -1,596 +0,0 @@
-"""
-Flexible panel for presenting bar graphs for a variety of stats. This panel is
-just concerned with the rendering of information, which is actually collected
-and stored by implementations of the GraphStats interface. Panels are made up
-of a title, followed by headers and graphs for two sets of stats. For
-instance...
-
-Bandwidth (cap: 5 MB, burst: 10 MB):
-Downloaded (0.0 B/sec):           Uploaded (0.0 B/sec):
-  34                                30
-                            *                                 *
-                    **  *   *                          *      **
-      *   *  *      ** **   **          ***  **       ** **   **
-     *********      ******  ******     *********      ******  ******
-   0 ************ ****************   0 ************ ****************
-         25s  50   1m   1.6  2.0           25s  50   1m   1.6  2.0
-"""
-
-import copy
-import curses
-
-import arm.popups
-import arm.controller
-
-import stem.control
-
-from arm.util import panel, torTools, uiTools
-
-from stem.util import conf, enum, str_tools
-
-# time intervals at which graphs can be updated
-
-UPDATE_INTERVALS = [
-  ("each second", 1),
-  ("5 seconds", 5),
-  ("30 seconds", 30),
-  ("minutely", 60),
-  ("15 minute", 900),
-  ("30 minute", 1800),
-  ("hourly", 3600),
-  ("daily", 86400),
-]
-
-DEFAULT_CONTENT_HEIGHT = 4  # space needed for labeling above and below the graph
-DEFAULT_COLOR_PRIMARY, DEFAULT_COLOR_SECONDARY = "green", "cyan"
-MIN_GRAPH_HEIGHT = 1
-
-# enums for graph bounds:
-#   Bounds.GLOBAL_MAX - global maximum (highest value ever seen)
-#   Bounds.LOCAL_MAX - local maximum (highest value currently on the graph)
-#   Bounds.TIGHT - local maximum and minimum
-
-Bounds = enum.Enum("GLOBAL_MAX", "LOCAL_MAX", "TIGHT")
-
-WIDE_LABELING_GRAPH_COL = 50  # minimum graph columns to use wide spacing for x-axis labels
-
-
-def conf_handler(key, value):
-  if key == "features.graph.height":
-    return max(MIN_GRAPH_HEIGHT, value)
-  elif key == "features.graph.max_width":
-    return max(1, value)
-  elif key == "features.graph.interval":
-    return max(0, min(len(UPDATE_INTERVALS) - 1, value))
-  elif key == "features.graph.bound":
-    return max(0, min(2, value))
-
-
-# used for setting defaults when initializing GraphStats and GraphPanel instances
-
-CONFIG = conf.config_dict("arm", {
-  "features.graph.height": 7,
-  "features.graph.interval": 0,
-  "features.graph.bound": 1,
-  "features.graph.max_width": 150,
-  "features.graph.showIntermediateBounds": True,
-}, conf_handler)
-
-
-class GraphStats:
-  """
-  Module that's expected to update dynamically and provide attributes to be
-  graphed. Up to two graphs (a 'primary' and 'secondary') can be displayed at a
-  time and timescale parameters use the labels defined in UPDATE_INTERVALS.
-  """
-
-  def __init__(self):
-    """
-    Initializes parameters needed to present a graph.
-    """
-
-    # panel to be redrawn when updated (set when added to GraphPanel)
-
-    self._graph_panel = None
-    self.is_selected = False
-    self.is_pause_buffer = False
-
-    # tracked stats
-
-    self.tick = 0                                    # number of processed events
-    self.last_primary, self.last_secondary = 0, 0    # most recent registered stats
-    self.primary_total, self.secondary_total = 0, 0  # sum of all stats seen
-
-    # timescale dependent stats
-
-    self.max_column = CONFIG["features.graph.max_width"]
-    self.max_primary, self.max_secondary = {}, {}
-    self.primary_counts, self.secondary_counts = {}, {}
-
-    for i in range(len(UPDATE_INTERVALS)):
-      # recent rates for graph
-
-      self.max_primary[i] = 0
-      self.max_secondary[i] = 0
-
-      # historic stats for graph, first is accumulator
-      # iterative insert needed to avoid making shallow copies (nasty, nasty gotcha)
-
-      self.primary_counts[i] = (self.max_column + 1) * [0]
-      self.secondary_counts[i] = (self.max_column + 1) * [0]
-
-    # tracks BW events
-
-    torTools.get_conn().add_event_listener(self.bandwidth_event, stem.control.EventType.BW)
-
-  def clone(self, new_copy=None):
-    """
-    Provides a deep copy of this instance.
-
-    Arguments:
-      new_copy - base instance to build copy off of
-    """
-
-    if not new_copy:
-      new_copy = GraphStats()
-
-    new_copy.tick = self.tick
-    new_copy.last_primary = self.last_primary
-    new_copy.last_secondary = self.last_secondary
-    new_copy.primary_total = self.primary_total
-    new_copy.secondary_total = self.secondary_total
-    new_copy.max_primary = dict(self.max_primary)
-    new_copy.max_secondary = dict(self.max_secondary)
-    new_copy.primary_counts = copy.deepcopy(self.primary_counts)
-    new_copy.secondary_counts = copy.deepcopy(self.secondary_counts)
-    new_copy.is_pause_buffer = True
-    return new_copy
-
-  def event_tick(self):
-    """
-    Called when it's time to process another event. All graphs use tor BW
-    events to keep in sync with each other (this happens once a second).
-    """
-
-    pass
-
-  def is_next_tick_redraw(self):
-    """
-    Provides true if the following tick (call to _process_event) will result in
-    being redrawn.
-    """
-
-    if self._graph_panel and self.is_selected and not self._graph_panel.is_paused():
-      # use the minimum of the current refresh rate and the panel's
-      update_rate = UPDATE_INTERVALS[self._graph_panel.update_interval][1]
-      return (self.tick + 1) % min(update_rate, self.get_refresh_rate()) == 0
-    else:
-      return False
-
-  def get_title(self, width):
-    """
-    Provides top label.
-    """
-
-    return ""
-
-  def get_header_label(self, width, is_primary):
-    """
-    Provides labeling presented at the top of the graph.
-    """
-
-    return ""
-
-  def get_color(self, is_primary):
-    """
-    Provides the color to be used for the graph and stats.
-    """
-
-    return DEFAULT_COLOR_PRIMARY if is_primary else DEFAULT_COLOR_SECONDARY
-
-  def get_content_height(self):
-    """
-    Provides the height content should take up (not including the graph).
-    """
-
-    return DEFAULT_CONTENT_HEIGHT
-
-  def get_refresh_rate(self):
-    """
-    Provides the number of ticks between when the stats have new values to be
-    redrawn.
-    """
-
-    return 1
-
-  def is_visible(self):
-    """
-    True if the stat has content to present, false if it should be hidden.
-    """
-
-    return True
-
-  def draw(self, panel, width, height):
-    """
-    Allows for any custom drawing the monitor wishes to append.
-    """
-
-    pass
-
-  def bandwidth_event(self, event):
-    if not self.is_pause_buffer:
-      self.event_tick()
-
-  def _process_event(self, primary, secondary):
-    """
-    Includes new stats in graphs and notifies associated GraphPanel of changes.
-    """
-
-    is_redraw = self.is_next_tick_redraw()
-
-    self.last_primary, self.last_secondary = primary, secondary
-    self.primary_total += primary
-    self.secondary_total += secondary
-
-    # updates for all time intervals
-
-    self.tick += 1
-
-    for i in range(len(UPDATE_INTERVALS)):
-      label, timescale = UPDATE_INTERVALS[i]
-
-      self.primary_counts[i][0] += primary
-      self.secondary_counts[i][0] += secondary
-
-      if self.tick % timescale == 0:
-        self.max_primary[i] = max(self.max_primary[i], self.primary_counts[i][0] / timescale)
-        self.primary_counts[i][0] /= timescale
-        self.primary_counts[i].insert(0, 0)
-        del self.primary_counts[i][self.max_column + 1:]
-
-        self.max_secondary[i] = max(self.max_secondary[i], self.secondary_counts[i][0] / timescale)
-        self.secondary_counts[i][0] /= timescale
-        self.secondary_counts[i].insert(0, 0)
-        del self.secondary_counts[i][self.max_column + 1:]
-
-    if is_redraw and self._graph_panel:
-      self._graph_panel.redraw(True)
-
-
-class GraphPanel(panel.Panel):
-  """
-  Panel displaying a graph, drawing statistics from custom GraphStats
-  implementations.
-  """
-
-  def __init__(self, stdscr):
-    panel.Panel.__init__(self, stdscr, "graph", 0)
-    self.update_interval = CONFIG["features.graph.interval"]
-    self.bounds = list(Bounds)[CONFIG["features.graph.bound"]]
-    self.graph_height = CONFIG["features.graph.height"]
-    self.current_display = None    # label of the stats currently being displayed
-    self.stats = {}                # available stats (mappings of label -> instance)
-    self.set_pause_attr("stats")
-
-  def get_update_interval(self):
-    """
-    Provides the rate that we update the graph at.
-    """
-
-    return self.update_interval
-
-  def set_update_interval(self, update_interval):
-    """
-    Sets the rate that we update the graph at.
-
-    Arguments:
-      update_interval - update time enum
-    """
-
-    self.update_interval = update_interval
-
-  def get_bounds_type(self):
-    """
-    Provides the type of graph bounds used.
-    """
-
-    return self.bounds
-
-  def set_bounds_type(self, bounds_type):
-    """
-    Sets the type of graph boundaries we use.
-
-    Arguments:
-      bounds_type - graph bounds enum
-    """
-
-    self.bounds = bounds_type
-
-  def get_height(self):
-    """
-    Provides the height requested by the currently displayed GraphStats (zero
-    if hidden).
-    """
-
-    if self.current_display and self.stats[self.current_display].is_visible():
-      return self.stats[self.current_display].get_content_height() + self.graph_height
-    else:
-      return 0
-
-  def set_graph_height(self, new_graph_height):
-    """
-    Sets the preferred height used for the graph (restricted to the
-    MIN_GRAPH_HEIGHT minimum).
-
-    Arguments:
-      new_graph_height - new height for the graph
-    """
-
-    self.graph_height = max(MIN_GRAPH_HEIGHT, new_graph_height)
-
-  def resize_graph(self):
-    """
-    Prompts for user input to resize the graph panel. Options include...
-      down arrow - grow graph
-      up arrow - shrink graph
-      enter / space - set size
-    """
-
-    control = arm.controller.get_controller()
-
-    panel.CURSES_LOCK.acquire()
-
-    try:
-      while True:
-        msg = "press the down/up to resize the graph, and enter when done"
-        control.set_msg(msg, curses.A_BOLD, True)
-        curses.cbreak()
-        key = control.get_screen().getch()
-
-        if key == curses.KEY_DOWN:
-          # don't grow the graph if it's already consuming the whole display
-          # (plus an extra line for the graph/log gap)
-
-          max_height = self.parent.getmaxyx()[0] - self.top
-          current_height = self.get_height()
-
-          if current_height < max_height + 1:
-            self.set_graph_height(self.graph_height + 1)
-        elif key == curses.KEY_UP:
-          self.set_graph_height(self.graph_height - 1)
-        elif uiTools.is_selection_key(key):
-          break
-
-        control.redraw()
-    finally:
-      control.set_msg()
-      panel.CURSES_LOCK.release()
-
-  def handle_key(self, key):
-    is_keystroke_consumed = True
-
-    if key == ord('r') or key == ord('R'):
-      self.resize_graph()
-    elif key == ord('b') or key == ord('B'):
-      # uses the next boundary type
-      self.bounds = Bounds.next(self.bounds)
-      self.redraw(True)
-    elif key == ord('s') or key == ord('S'):
-      # provides a menu to pick the graphed stats
-
-      available_stats = self.stats.keys()
-      available_stats.sort()
-
-      # uses sorted, camel cased labels for the options
-
-      options = ["None"]
-
-      for label in available_stats:
-        words = label.split()
-        options.append(" ".join(word[0].upper() + word[1:] for word in words))
-
-      if self.current_display:
-        initial_selection = available_stats.index(self.current_display) + 1
-      else:
-        initial_selection = 0
-
-      selection = arm.popups.show_menu("Graphed Stats:", options, initial_selection)
-
-      # applies new setting
-
-      if selection == 0:
-        self.set_stats(None)
-      elif selection != -1:
-        self.set_stats(available_stats[selection - 1])
-    elif key == ord('i') or key == ord('I'):
-      # provides menu to pick graph panel update interval
-
-      options = [label for (label, _) in UPDATE_INTERVALS]
-      selection = arm.popups.show_menu("Update Interval:", options, self.update_interval)
-
-      if selection != -1:
-        self.update_interval = selection
-    else:
-      is_keystroke_consumed = False
-
-    return is_keystroke_consumed
-
-  def get_help(self):
-    if self.current_display:
-      graphed_stats = self.current_display
-    else:
-      graphed_stats = "none"
-
-    options = []
-    options.append(("r", "resize graph", None))
-    options.append(("s", "graphed stats", graphed_stats))
-    options.append(("b", "graph bounds", self.bounds.lower()))
-    options.append(("i", "graph update interval", UPDATE_INTERVALS[self.update_interval][0]))
-    return options
-
-  def draw(self, width, height):
-    """ Redraws graph panel """
-
-    if self.current_display:
-      param = self.get_attr("stats")[self.current_display]
-      graph_column = min((width - 10) / 2, param.max_column)
-
-      primary_color = uiTools.get_color(param.get_color(True))
-      secondary_color = uiTools.get_color(param.get_color(False))
-
-      if self.is_title_visible():
-        self.addstr(0, 0, param.get_title(width), curses.A_STANDOUT)
-
-      # top labels
-
-      left, right = param.get_header_label(width / 2, True), param.get_header_label(width / 2, False)
-
-      if left:
-        self.addstr(1, 0, left, curses.A_BOLD | primary_color)
-
-      if right:
-        self.addstr(1, graph_column + 5, right, curses.A_BOLD | secondary_color)
-
-      # determines max/min value on the graph
-
-      if self.bounds == Bounds.GLOBAL_MAX:
-        primary_max_bound = int(param.max_primary[self.update_interval])
-        secondary_max_bound = int(param.max_secondary[self.update_interval])
-      else:
-        # both Bounds.LOCAL_MAX and Bounds.TIGHT use local maxima
-        if graph_column < 2:
-          # nothing being displayed
-          primary_max_bound, secondary_max_bound = 0, 0
-        else:
-          primary_max_bound = int(max(param.primary_counts[self.update_interval][1:graph_column + 1]))
-          secondary_max_bound = int(max(param.secondary_counts[self.update_interval][1:graph_column + 1]))
-
-      primary_min_bound = secondary_min_bound = 0
-
-      if self.bounds == Bounds.TIGHT:
-        primary_min_bound = int(min(param.primary_counts[self.update_interval][1:graph_column + 1]))
-        secondary_min_bound = int(min(param.secondary_counts[self.update_interval][1:graph_column + 1]))
-
-        # if the max = min (ie, all values are the same) then use zero lower
-        # bound so a graph is still displayed
-
-        if primary_min_bound == primary_max_bound:
-          primary_min_bound = 0
-
-        if secondary_min_bound == secondary_max_bound:
-          secondary_min_bound = 0
-
-      # displays upper and lower bounds
-
-      self.addstr(2, 0, "%4i" % primary_max_bound, primary_color)
-      self.addstr(self.graph_height + 1, 0, "%4i" % primary_min_bound, primary_color)
-
-      self.addstr(2, graph_column + 5, "%4i" % secondary_max_bound, secondary_color)
-      self.addstr(self.graph_height + 1, graph_column + 5, "%4i" % secondary_min_bound, secondary_color)
-
-      # displays intermediate bounds on every other row
-
-      if CONFIG["features.graph.showIntermediateBounds"]:
-        ticks = (self.graph_height - 3) / 2
-
-        for i in range(ticks):
-          row = self.graph_height - (2 * i) - 3
-
-          if self.graph_height % 2 == 0 and i >= (ticks / 2):
-            row -= 1
-
-          if primary_min_bound != primary_max_bound:
-            primary_val = (primary_max_bound - primary_min_bound) * (self.graph_height - row - 1) / (self.graph_height - 1)
-
-            if not primary_val in (primary_min_bound, primary_max_bound):
-              self.addstr(row + 2, 0, "%4i" % primary_val, primary_color)
-
-          if secondary_min_bound != secondary_max_bound:
-            secondary_val = (secondary_max_bound - secondary_min_bound) * (self.graph_height - row - 1) / (self.graph_height - 1)
-
-            if not secondary_val in (secondary_min_bound, secondary_max_bound):
-              self.addstr(row + 2, graph_column + 5, "%4i" % secondary_val, secondary_color)
-
-      # creates bar graph (both primary and secondary)
-
-      for col in range(graph_column):
-        column_count = int(param.primary_counts[self.update_interval][col + 1]) - primary_min_bound
-        column_height = min(self.graph_height, self.graph_height * column_count / (max(1, primary_max_bound) - primary_min_bound))
-
-        for row in range(column_height):
-          self.addstr(self.graph_height + 1 - row, col + 5, " ", curses.A_STANDOUT | primary_color)
-
-        column_count = int(param.secondary_counts[self.update_interval][col + 1]) - secondary_min_bound
-        column_height = min(self.graph_height, self.graph_height * column_count / (max(1, secondary_max_bound) - secondary_min_bound))
-
-        for row in range(column_height):
-          self.addstr(self.graph_height + 1 - row, col + graph_column + 10, " ", curses.A_STANDOUT | secondary_color)
-
-      # bottom labeling of x-axis
-
-      interval_sec = 1  # seconds per labeling
-
-      for i in range(len(UPDATE_INTERVALS)):
-        if i == self.update_interval:
-          interval_sec = UPDATE_INTERVALS[i][1]
-
-      interval_spacing = 10 if graph_column >= WIDE_LABELING_GRAPH_COL else 5
-      units_label, decimal_precision = None, 0
-
-      for i in range((graph_column - 4) / interval_spacing):
-        loc = (i + 1) * interval_spacing
-        time_label = str_tools.get_time_label(loc * interval_sec, decimal_precision)
-
-        if not units_label:
-          units_label = time_label[-1]
-        elif units_label != time_label[-1]:
-          # upped scale so also up precision of future measurements
-          units_label = time_label[-1]
-          decimal_precision += 1
-        else:
-          # if constrained on space then strips labeling since already provided
-          time_label = time_label[:-1]
-
-        self.addstr(self.graph_height + 2, 4 + loc, time_label, primary_color)
-        self.addstr(self.graph_height + 2, graph_column + 10 + loc, time_label, secondary_color)
-
-      param.draw(self, width, height)  # allows current stats to modify the display
-
-  def add_stats(self, label, stats):
-    """
-    Makes GraphStats instance available in the panel.
-    """
-
-    stats._graph_panel = self
-    self.stats[label] = stats
-
-  def get_stats(self):
-    """
-    Provides the currently selected stats label.
-    """
-
-    return self.current_display
-
-  def set_stats(self, label):
-    """
-    Sets the currently displayed stats instance, hiding panel if None.
-    """
-
-    if label != self.current_display:
-      if self.current_display:
-        self.stats[self.current_display].is_selected = False
-
-      if not label:
-        self.current_display = None
-      elif label in self.stats.keys():
-        self.current_display = label
-        self.stats[self.current_display].is_selected = True
-      else:
-        raise ValueError("Unrecognized stats label: %s" % label)
-
-  def copy_attr(self, attr):
-    if attr == "stats":
-      # uses custom clone method to copy GraphStats instances
-      return dict([(key, self.stats[key].clone()) for key in self.stats])
-    else:
-      return panel.Panel.copy_attr(self, attr)
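The per-interval bookkeeping in _process_event() above is easiest to see with
a single timescale: each interval keeps a leading accumulator slot that is
averaged and frozen whenever the timescale elapses, and the oldest samples are
cropped off the end. A toy run with a 5 second timescale and a narrow graph
(all values made up):

  timescale, max_column = 5, 4
  counts = (max_column + 1) * [0]  # counts[0] is the accumulator

  for tick, value in enumerate([2, 4, 6, 8, 10, 1, 3, 5, 7, 9], start=1):
    counts[0] += value

    if tick % timescale == 0:
      counts[0] /= timescale         # accumulator becomes the interval's average
      counts.insert(0, 0)            # open a fresh accumulator slot
      del counts[max_column + 1:]    # crop to the graphable width

  # the newest completed interval now sits at counts[1]; older ones follow it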
diff --git a/arm/graphing/graph_panel.py b/arm/graphing/graph_panel.py
new file mode 100644
index 0000000..8d2b5ca
--- /dev/null
+++ b/arm/graphing/graph_panel.py
@@ -0,0 +1,596 @@
+"""
+Flexible panel for presenting bar graphs for a variety of stats. This panel is
+just concerned with the rendering of information, which is actually collected
+and stored by implementations of the GraphStats interface. Panels are made up
+of a title, followed by headers and graphs for two sets of stats. For
+instance...
+
+Bandwidth (cap: 5 MB, burst: 10 MB):
+Downloaded (0.0 B/sec):           Uploaded (0.0 B/sec):
+  34                                30
+                            *                                 *
+                    **  *   *                          *      **
+      *   *  *      ** **   **          ***  **       ** **   **
+     *********      ******  ******     *********      ******  ******
+   0 ************ ****************   0 ************ ****************
+         25s  50   1m   1.6  2.0           25s  50   1m   1.6  2.0
+"""
+
+import copy
+import curses
+
+import arm.popups
+import arm.controller
+
+import stem.control
+
+from arm.util import panel, tor_tools, ui_tools
+
+from stem.util import conf, enum, str_tools
+
+# time intervals at which graphs can be updated
+
+UPDATE_INTERVALS = [
+  ("each second", 1),
+  ("5 seconds", 5),
+  ("30 seconds", 30),
+  ("minutely", 60),
+  ("15 minute", 900),
+  ("30 minute", 1800),
+  ("hourly", 3600),
+  ("daily", 86400),
+]
+
+DEFAULT_CONTENT_HEIGHT = 4  # space needed for labeling above and below the graph
+DEFAULT_COLOR_PRIMARY, DEFAULT_COLOR_SECONDARY = "green", "cyan"
+MIN_GRAPH_HEIGHT = 1
+
+# enums for graph bounds:
+#   Bounds.GLOBAL_MAX - global maximum (highest value ever seen)
+#   Bounds.LOCAL_MAX - local maximum (highest value currently on the graph)
+#   Bounds.TIGHT - local maximum and minimum
+
+Bounds = enum.Enum("GLOBAL_MAX", "LOCAL_MAX", "TIGHT")
+
+WIDE_LABELING_GRAPH_COL = 50  # minimum graph columns to use wide spacing for x-axis labels
+
+
+def conf_handler(key, value):
+  if key == "features.graph.height":
+    return max(MIN_GRAPH_HEIGHT, value)
+  elif key == "features.graph.max_width":
+    return max(1, value)
+  elif key == "features.graph.interval":
+    return max(0, min(len(UPDATE_INTERVALS) - 1, value))
+  elif key == "features.graph.bound":
+    return max(0, min(2, value))
+
+
+# used for setting defaults when initializing GraphStats and GraphPanel instances
+
+CONFIG = conf.config_dict("arm", {
+  "features.graph.height": 7,
+  "features.graph.interval": 0,
+  "features.graph.bound": 1,
+  "features.graph.max_width": 150,
+  "features.graph.showIntermediateBounds": True,
+}, conf_handler)
+
+
+class GraphStats:
+  """
+  Module that's expected to update dynamically and provide attributes to be
+  graphed. Up to two graphs (a 'primary' and 'secondary') can be displayed at a
+  time and timescale parameters use the labels defined in UPDATE_INTERVALS.
+  """
+
+  def __init__(self):
+    """
+    Initializes parameters needed to present a graph.
+    """
+
+    # panel to be redrawn when updated (set when added to GraphPanel)
+
+    self._graph_panel = None
+    self.is_selected = False
+    self.is_pause_buffer = False
+
+    # tracked stats
+
+    self.tick = 0                                    # number of processed events
+    self.last_primary, self.last_secondary = 0, 0    # most recent registered stats
+    self.primary_total, self.secondary_total = 0, 0  # sum of all stats seen
+
+    # timescale dependent stats
+
+    self.max_column = CONFIG["features.graph.max_width"]
+    self.max_primary, self.max_secondary = {}, {}
+    self.primary_counts, self.secondary_counts = {}, {}
+
+    for i in range(len(UPDATE_INTERVALS)):
+      # recent rates for graph
+
+      self.max_primary[i] = 0
+      self.max_secondary[i] = 0
+
+      # historic stats for graph, first is accumulator
+      # iterative insert needed to avoid making shallow copies (nasty, nasty gotcha)
+
+      self.primary_counts[i] = (self.max_column + 1) * [0]
+      self.secondary_counts[i] = (self.max_column + 1) * [0]
+
+    # tracks BW events
+
+    tor_tools.get_conn().add_event_listener(self.bandwidth_event, stem.control.EventType.BW)
+
+  def clone(self, new_copy=None):
+    """
+    Provides a deep copy of this instance.
+
+    Arguments:
+      new_copy - base instance to build copy off of
+    """
+
+    if not new_copy:
+      new_copy = GraphStats()
+
+    new_copy.tick = self.tick
+    new_copy.last_primary = self.last_primary
+    new_copy.last_secondary = self.last_secondary
+    new_copy.primary_total = self.primary_total
+    new_copy.secondary_total = self.secondary_total
+    new_copy.max_primary = dict(self.max_primary)
+    new_copy.max_secondary = dict(self.max_secondary)
+    new_copy.primary_counts = copy.deepcopy(self.primary_counts)
+    new_copy.secondary_counts = copy.deepcopy(self.secondary_counts)
+    new_copy.is_pause_buffer = True
+    return new_copy
+
+  def event_tick(self):
+    """
+    Called when it's time to process another event. All graphs use tor BW
+    events to keep in sync with each other (this happens once a second).
+    """
+
+    pass
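+
+  # Implementations typically gather their figures in event_tick() and feed
+  # them through _process_event() below; ResourceStats in resource_stats.py is
+  # a small example of this pattern.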
+
+  def is_next_tick_redraw(self):
+    """
+    Provides True if the next tick (call to _process_event) will result in the
+    panel being redrawn.
+    """
+
+    if self._graph_panel and self.is_selected and not self._graph_panel.is_paused():
+      # use the minimum of the stat's refresh rate and the panel's update interval
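+      # (e.g. a stat whose get_refresh_rate() is 10 under a 30 second update
+      # interval would be redrawn on every tenth tick)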
+      update_rate = UPDATE_INTERVALS[self._graph_panel.update_interval][1]
+      return (self.tick + 1) % min(update_rate, self.get_refresh_rate()) == 0
+    else:
+      return False
+
+  def get_title(self, width):
+    """
+    Provides top label.
+    """
+
+    return ""
+
+  def get_header_label(self, width, is_primary):
+    """
+    Provides labeling presented at the top of the graph.
+    """
+
+    return ""
+
+  def get_color(self, is_primary):
+    """
+    Provides the color to be used for the graph and stats.
+    """
+
+    return DEFAULT_COLOR_PRIMARY if is_primary else DEFAULT_COLOR_SECONDARY
+
+  def get_content_height(self):
+    """
+    Provides the height content should take up (not including the graph).
+    """
+
+    return DEFAULT_CONTENT_HEIGHT
+
+  def get_refresh_rate(self):
+    """
+    Provides the number of ticks between when the stats have new values to be
+    redrawn.
+    """
+
+    return 1
+
+  def is_visible(self):
+    """
+    True if the stat has content to present, false if it should be hidden.
+    """
+
+    return True
+
+  def draw(self, panel, width, height):
+    """
+    Allows for any custom drawing the stats implementation wishes to append.
+    """
+
+    pass
+
+  def bandwidth_event(self, event):
+    if not self.is_pause_buffer:
+      self.event_tick()
+
+  def _process_event(self, primary, secondary):
+    """
+    Includes new stats in graphs and notifies associated GraphPanel of changes.
+    """
+
+    is_redraw = self.is_next_tick_redraw()
+
+    self.last_primary, self.last_secondary = primary, secondary
+    self.primary_total += primary
+    self.secondary_total += secondary
+
+    # updates for all time intervals
+
+    self.tick += 1
+
+    for i in range(len(UPDATE_INTERVALS)):
+      label, timescale = UPDATE_INTERVALS[i]
+
+      self.primary_counts[i][0] += primary
+      self.secondary_counts[i][0] += secondary
+
+      if self.tick % timescale == 0:
+        self.max_primary[i] = max(self.max_primary[i], self.primary_counts[i][0] / timescale)
+        self.primary_counts[i][0] /= timescale
+        self.primary_counts[i].insert(0, 0)
+        del self.primary_counts[i][self.max_column + 1:]
+
+        self.max_secondary[i] = max(self.max_secondary[i], self.secondary_counts[i][0] / timescale)
+        self.secondary_counts[i][0] /= timescale
+        self.secondary_counts[i].insert(0, 0)
+        del self.secondary_counts[i][self.max_column + 1:]
+
+    if is_redraw and self._graph_panel:
+      self._graph_panel.redraw(True)
+
+
+class GraphPanel(panel.Panel):
+  """
+  Panel displaying a graph, drawing statistics from custom GraphStats
+  implementations.
+  """
+
+  def __init__(self, stdscr):
+    panel.Panel.__init__(self, stdscr, "graph", 0)
+    self.update_interval = CONFIG["features.graph.interval"]
+    self.bounds = list(Bounds)[CONFIG["features.graph.bound"]]
+    self.graph_height = CONFIG["features.graph.height"]
+    self.current_display = None    # label of the stats currently being displayed
+    self.stats = {}                # available stats (mappings of label -> instance)
+    self.set_pause_attr("stats")
+
+  def get_update_interval(self):
+    """
+    Provides the rate that we update the graph at.
+    """
+
+    return self.update_interval
+
+  def set_update_interval(self, update_interval):
+    """
+    Sets the rate that we update the graph at.
+
+    Arguments:
+      update_interval - update time enum
+    """
+
+    self.update_interval = update_interval
+
+  def get_bounds_type(self):
+    """
+    Provides the type of graph bounds used.
+    """
+
+    return self.bounds
+
+  def set_bounds_type(self, bounds_type):
+    """
+    Sets the type of graph boundaries we use.
+
+    Arguments:
+      bounds_type - graph bounds enum
+    """
+
+    self.bounds = bounds_type
+
+  def get_height(self):
+    """
+    Provides the height requested by the currently displayed GraphStats (zero
+    if hidden).
+    """
+
+    if self.current_display and self.stats[self.current_display].is_visible():
+      return self.stats[self.current_display].get_content_height() + self.graph_height
+    else:
+      return 0
+
+  def set_graph_height(self, new_graph_height):
+    """
+    Sets the preferred height used for the graph (restricted to the
+    MIN_GRAPH_HEIGHT minimum).
+
+    Arguments:
+      new_graph_height - new height for the graph
+    """
+
+    self.graph_height = max(MIN_GRAPH_HEIGHT, new_graph_height)
+
+  def resize_graph(self):
+    """
+    Prompts for user input to resize the graph panel. Options include...
+      down arrow - grow graph
+      up arrow - shrink graph
+      enter / space - set size
+    """
+
+    control = arm.controller.get_controller()
+
+    panel.CURSES_LOCK.acquire()
+
+    try:
+      while True:
+        msg = "press the down/up to resize the graph, and enter when done"
+        control.set_msg(msg, curses.A_BOLD, True)
+        curses.cbreak()
+        key = control.get_screen().getch()
+
+        if key == curses.KEY_DOWN:
+          # don't grow the graph if it's already consuming the whole display
+          # (plus an extra line for the graph/log gap)
+
+          max_height = self.parent.getmaxyx()[0] - self.top
+          current_height = self.get_height()
+
+          if current_height < max_height + 1:
+            self.set_graph_height(self.graph_height + 1)
+        elif key == curses.KEY_UP:
+          self.set_graph_height(self.graph_height - 1)
+        elif ui_tools.is_selection_key(key):
+          break
+
+        control.redraw()
+    finally:
+      control.set_msg()
+      panel.CURSES_LOCK.release()
+
+  def handle_key(self, key):
+    is_keystroke_consumed = True
+
+    if key == ord('r') or key == ord('R'):
+      self.resize_graph()
+    elif key == ord('b') or key == ord('B'):
+      # uses the next boundary type
+      self.bounds = Bounds.next(self.bounds)
+      self.redraw(True)
+    elif key == ord('s') or key == ord('S'):
+      # provides a menu to pick the graphed stats
+
+      available_stats = self.stats.keys()
+      available_stats.sort()
+
+      # uses sorted, camel cased labels for the options
+
+      options = ["None"]
+
+      for label in available_stats:
+        words = label.split()
+        options.append(" ".join(word[0].upper() + word[1:] for word in words))
+
+      if self.current_display:
+        initial_selection = available_stats.index(self.current_display) + 1
+      else:
+        initial_selection = 0
+
+      selection = arm.popups.show_menu("Graphed Stats:", options, initial_selection)
+
+      # applies new setting
+
+      if selection == 0:
+        self.set_stats(None)
+      elif selection != -1:
+        self.set_stats(available_stats[selection - 1])
+    elif key == ord('i') or key == ord('I'):
+      # provides menu to pick graph panel update interval
+
+      options = [label for (label, _) in UPDATE_INTERVALS]
+      selection = arm.popups.show_menu("Update Interval:", options, self.update_interval)
+
+      if selection != -1:
+        self.update_interval = selection
+    else:
+      is_keystroke_consumed = False
+
+    return is_keystroke_consumed
+
+  def get_help(self):
+    if self.current_display:
+      graphed_stats = self.current_display
+    else:
+      graphed_stats = "none"
+
+    options = []
+    options.append(("r", "resize graph", None))
+    options.append(("s", "graphed stats", graphed_stats))
+    options.append(("b", "graph bounds", self.bounds.lower()))
+    options.append(("i", "graph update interval", UPDATE_INTERVALS[self.update_interval][0]))
+    return options
+
+  def draw(self, width, height):
+    """ Redraws graph panel """
+
+    if self.current_display:
+      param = self.get_attr("stats")[self.current_display]
+      graph_column = min((width - 10) / 2, param.max_column)
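+      # (each graph gets half the width, minus the two 5-column gutters that
+      # hold the four character y-axis bound labels)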
+
+      primary_color = ui_tools.get_color(param.get_color(True))
+      secondary_color = ui_tools.get_color(param.get_color(False))
+
+      if self.is_title_visible():
+        self.addstr(0, 0, param.get_title(width), curses.A_STANDOUT)
+
+      # top labels
+
+      left, right = param.get_header_label(width / 2, True), param.get_header_label(width / 2, False)
+
+      if left:
+        self.addstr(1, 0, left, curses.A_BOLD | primary_color)
+
+      if right:
+        self.addstr(1, graph_column + 5, right, curses.A_BOLD | secondary_color)
+
+      # determines max/min value on the graph
+
+      if self.bounds == Bounds.GLOBAL_MAX:
+        primary_max_bound = int(param.max_primary[self.update_interval])
+        secondary_max_bound = int(param.max_secondary[self.update_interval])
+      else:
+        # both Bounds.LOCAL_MAX and Bounds.TIGHT use local maxima
+        if graph_column < 2:
+          # nothing being displayed
+          primary_max_bound, secondary_max_bound = 0, 0
+        else:
+          primary_max_bound = int(max(param.primary_counts[self.update_interval][1:graph_column + 1]))
+          secondary_max_bound = int(max(param.secondary_counts[self.update_interval][1:graph_column + 1]))
+
+      primary_min_bound = secondary_min_bound = 0
+
+      if self.bounds == Bounds.TIGHT:
+        primary_min_bound = int(min(param.primary_counts[self.update_interval][1:graph_column + 1]))
+        secondary_min_bound = int(min(param.secondary_counts[self.update_interval][1:graph_column + 1]))
+
+        # if the max = min (ie, all values are the same) then use zero lower
+        # bound so a graph is still displayed
+
+        if primary_min_bound == primary_max_bound:
+          primary_min_bound = 0
+
+        if secondary_min_bound == secondary_max_bound:
+          secondary_min_bound = 0
+
+      # displays upper and lower bounds
+
+      self.addstr(2, 0, "%4i" % primary_max_bound, primary_color)
+      self.addstr(self.graph_height + 1, 0, "%4i" % primary_min_bound, primary_color)
+
+      self.addstr(2, graph_column + 5, "%4i" % secondary_max_bound, secondary_color)
+      self.addstr(self.graph_height + 1, graph_column + 5, "%4i" % secondary_min_bound, secondary_color)
+
+      # displays intermediate bounds on every other row
+
+      if CONFIG["features.graph.showIntermediateBounds"]:
+        ticks = (self.graph_height - 3) / 2
+
+        for i in range(ticks):
+          row = self.graph_height - (2 * i) - 3
+
+          if self.graph_height % 2 == 0 and i >= (ticks / 2):
+            row -= 1
+
+          if primary_min_bound != primary_max_bound:
+            primary_val = (primary_max_bound - primary_min_bound) * (self.graph_height - row - 1) / (self.graph_height - 1)
+
+            if primary_val not in (primary_min_bound, primary_max_bound):
+              self.addstr(row + 2, 0, "%4i" % primary_val, primary_color)
+
+          if secondary_min_bound != secondary_max_bound:
+            secondary_val = (secondary_max_bound - secondary_min_bound) * (self.graph_height - row - 1) / (self.graph_height - 1)
+
+            if secondary_val not in (secondary_min_bound, secondary_max_bound):
+              self.addstr(row + 2, graph_column + 5, "%4i" % secondary_val, secondary_color)
+
+      # creates bar graph (both primary and secondary)
+
+      for col in range(graph_column):
+        column_count = int(param.primary_counts[self.update_interval][col + 1]) - primary_min_bound
+        column_height = min(self.graph_height, self.graph_height * column_count / (max(1, primary_max_bound) - primary_min_bound))
+
+        for row in range(column_height):
+          self.addstr(self.graph_height + 1 - row, col + 5, " ", curses.A_STANDOUT | primary_color)
+
+        column_count = int(param.secondary_counts[self.update_interval][col + 1]) - secondary_min_bound
+        column_height = min(self.graph_height, self.graph_height * column_count / (max(1, secondary_max_bound) - secondary_min_bound))
+
+        for row in range(column_height):
+          self.addstr(self.graph_height + 1 - row, col + graph_column + 10, " ", curses.A_STANDOUT | secondary_color)
+
+      # bottom labeling of x-axis
+
+      interval_sec = 1  # seconds per labeling
+
+      for i in range(len(UPDATE_INTERVALS)):
+        if i == self.update_interval:
+          interval_sec = UPDATE_INTERVALS[i][1]
+
+      interval_spacing = 10 if graph_column >= WIDE_LABELING_GRAPH_COL else 5
+      units_label, decimal_precision = None, 0
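+      # (e.g. the "5 seconds" interval with narrow spacing produces the
+      # "25s  50   1m   1.6  2.0" labeling shown in the module docstring)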
+
+      for i in range((graph_column - 4) / interval_spacing):
+        loc = (i + 1) * interval_spacing
+        time_label = str_tools.get_time_label(loc * interval_sec, decimal_precision)
+
+        if not units_label:
+          units_label = time_label[-1]
+        elif units_label != time_label[-1]:
+          # upped scale so also up precision of future measurements
+          units_label = time_label[-1]
+          decimal_precision += 1
+        else:
+          # same unit as the previous label so strip it to save space
+          time_label = time_label[:-1]
+
+        self.addstr(self.graph_height + 2, 4 + loc, time_label, primary_color)
+        self.addstr(self.graph_height + 2, graph_column + 10 + loc, time_label, secondary_color)
+
+      param.draw(self, width, height)  # allows current stats to modify the display
+
+  def add_stats(self, label, stats):
+    """
+    Makes GraphStats instance available in the panel.
+    """
+
+    stats._graph_panel = self
+    self.stats[label] = stats
+
+  def get_stats(self):
+    """
+    Provides the currently selected stats label.
+    """
+
+    return self.current_display
+
+  def set_stats(self, label):
+    """
+    Sets the currently displayed stats instance, hiding panel if None.
+    """
+
+    if label != self.current_display:
+      if self.current_display:
+        self.stats[self.current_display].is_selected = False
+
+      if not label:
+        self.current_display = None
+      elif label in self.stats.keys():
+        self.current_display = label
+        self.stats[self.current_display].is_selected = True
+      else:
+        raise ValueError("Unrecognized stats label: %s" % label)
+
+  def copy_attr(self, attr):
+    if attr == "stats":
+      # uses custom clone method to copy GraphStats instances
+      return dict([(key, self.stats[key].clone()) for key in self.stats])
+    else:
+      return panel.Panel.copy_attr(self, attr)
diff --git a/arm/graphing/resourceStats.py b/arm/graphing/resourceStats.py
deleted file mode 100644
index dc3aaf9..0000000
--- a/arm/graphing/resourceStats.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
-Tracks the system resource usage (cpu and memory) of the tor process.
-"""
-
-import arm.util.tracker
-
-from arm.graphing import graphPanel
-from arm.util import torTools
-
-from stem.util import str_tools
-
-
-class ResourceStats(graphPanel.GraphStats):
-  """
-  System resource usage tracker.
-  """
-
-  def __init__(self):
-    graphPanel.GraphStats.__init__(self)
-    self.query_pid = torTools.get_conn().controller.get_pid(None)
-    self.last_counter = None
-
-  def clone(self, new_copy=None):
-    if not new_copy:
-      new_copy = ResourceStats()
-
-    return graphPanel.GraphStats.clone(self, new_copy)
-
-  def get_title(self, width):
-    return "System Resources:"
-
-  def get_header_label(self, width, is_primary):
-    avg = (self.primary_total if is_primary else self.secondary_total) / max(1, self.tick)
-    last_amount = self.last_primary if is_primary else self.last_secondary
-
-    if is_primary:
-      return "CPU (%0.1f%%, avg: %0.1f%%):" % (last_amount, avg)
-    else:
-      # memory sizes are converted from MB to B before generating labels
-
-      usage_label = str_tools.get_size_label(last_amount * 1048576, 1)
-      avg_label = str_tools.get_size_label(avg * 1048576, 1)
-
-      return "Memory (%s, avg: %s):" % (usage_label, avg_label)
-
-  def event_tick(self):
-    """
-    Fetch the cached measurement of resource usage from the ResourceTracker.
-    """
-
-    primary, secondary = 0, 0
-
-    if self.query_pid:
-      resource_tracker = arm.util.tracker.get_resource_tracker()
-
-      if resource_tracker and resource_tracker.run_counter() != self.last_counter:
-        resources = resource_tracker.get_resource_usage()
-        self.last_counter = resource_tracker.run_counter()
-        primary = resources.cpu_sample * 100  # decimal percentage to whole numbers
-        secondary = resources.memory_bytes / 1048576  # translate size to MB so axis labels are short
-        self.run_count = resource_tracker.run_counter()
-
-    self._process_event(primary, secondary)
diff --git a/arm/graphing/resource_stats.py b/arm/graphing/resource_stats.py
new file mode 100644
index 0000000..2803bd1
--- /dev/null
+++ b/arm/graphing/resource_stats.py
@@ -0,0 +1,63 @@
+"""
+Tracks the system resource usage (cpu and memory) of the tor process.
+"""
+
+import arm.util.tracker
+
+from arm.graphing import graph_panel
+from arm.util import tor_tools
+
+from stem.util import str_tools
+
+
+class ResourceStats(graph_panel.GraphStats):
+  """
+  System resource usage tracker.
+  """
+
+  def __init__(self):
+    graph_panel.GraphStats.__init__(self)
+    self.query_pid = tor_tools.get_conn().controller.get_pid(None)
+    self.last_counter = None
+
+  def clone(self, new_copy=None):
+    if not new_copy:
+      new_copy = ResourceStats()
+
+    return graph_panel.GraphStats.clone(self, new_copy)
+
+  def get_title(self, width):
+    return "System Resources:"
+
+  def get_header_label(self, width, is_primary):
+    avg = (self.primary_total if is_primary else self.secondary_total) / max(1, self.tick)
+    last_amount = self.last_primary if is_primary else self.last_secondary
+
+    if is_primary:
+      return "CPU (%0.1f%%, avg: %0.1f%%):" % (last_amount, avg)
+    else:
+      # memory sizes are converted from MB to B before generating labels
+
+      usage_label = str_tools.get_size_label(last_amount * 1048576, 1)
+      avg_label = str_tools.get_size_label(avg * 1048576, 1)
+
+      return "Memory (%s, avg: %s):" % (usage_label, avg_label)
+
+  def event_tick(self):
+    """
+    Fetch the cached measurement of resource usage from the ResourceTracker.
+    """
+
+    primary, secondary = 0, 0
+
+    if self.query_pid:
+      resource_tracker = arm.util.tracker.get_resource_tracker()
+
+      if resource_tracker and resource_tracker.run_counter() != self.last_counter:
+        resources = resource_tracker.get_resource_usage()
+        self.last_counter = resource_tracker.run_counter()
+        primary = resources.cpu_sample * 100  # decimal percentage to whole numbers
+        secondary = resources.memory_bytes / 1048576  # translate size to MB so axis labels are short
+        self.run_count = resource_tracker.run_counter()
+
+    self._process_event(primary, secondary)
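+
+# A minimal wiring sketch, assuming graph_panel is an initialized
+# arm.graphing.graph_panel.GraphPanel and "resource usage" is merely an
+# illustrative label:
+#
+#   graph_panel.add_stats("resource usage", ResourceStats())
+#   graph_panel.set_stats("resource usage")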
diff --git a/arm/headerPanel.py b/arm/headerPanel.py
deleted file mode 100644
index 744079b..0000000
--- a/arm/headerPanel.py
+++ /dev/null
@@ -1,679 +0,0 @@
-"""
-Top panel for every page, containing basic system and tor related information.
-If there's room available then this expands to present its information in two
-columns, otherwise it's laid out as follows:
-  arm - <hostname> (<os> <sys/version>)         Tor <tor/version> (<new, old, recommended, etc>)
-  <nickname> - <address>:<or_port>, [Dir Port: <dir_port>, ]Control Port (<open, password, cookie>): <control_port>
-  cpu: <cpu%> mem: <mem> (<mem%>) pid: <pid> uptime: <upmin>:<upsec>
-  fingerprint: <fingerprint>
-
-Example:
-  arm - odin (Linux 2.6.24-24-generic)         Tor 0.2.1.19 (recommended)
-  odin - 76.104.132.98:9001, Dir Port: 9030, Control Port (cookie): 9051
-  cpu: 14.6%    mem: 42 MB (4.2%)    pid: 20060   uptime: 48:27
-  fingerprint: BDAD31F6F318E0413833E8EBDA956F76E4D66788
-"""
-
-import os
-import time
-import curses
-import threading
-
-import arm.util.tracker
-
-from stem.control import State
-from stem.util import conf, log, str_tools
-
-import arm.starter
-import arm.popups
-import arm.controller
-
-from util import panel, torTools, uiTools
-
-# minimum width for which panel attempts to double up contents (two columns to
-# better use screen real estate)
-
-MIN_DUAL_COL_WIDTH = 141
-
-FLAG_COLORS = {
-  "Authority": "white",
-  "BadExit": "red",
-  "BadDirectory": "red",
-  "Exit": "cyan",
-  "Fast": "yellow",
-  "Guard": "green",
-  "HSDir": "magenta",
-  "Named": "blue",
-  "Stable": "blue",
-  "Running": "yellow",
-  "Unnamed": "magenta",
-  "Valid": "green",
-  "V2Dir": "cyan",
-  "V3Dir": "white",
-}
-
-VERSION_STATUS_COLORS = {
-  "new": "blue",
-  "new in series": "blue",
-  "obsolete": "red",
-  "recommended": "green",
-  "old": "red",
-  "unrecommended": "red",
-  "unknown": "cyan",
-}
-
-CONFIG = conf.config_dict("arm", {
-  "features.showFdUsage": False,
-})
-
-
-class HeaderPanel(panel.Panel, threading.Thread):
-  """
-  Top area containing tor settings and system information. Stats are stored in
-  the vals mapping, keys including:
-    tor/  version, versionStatus, nickname, or_port, dir_port, control_port,
-          socketPath, exit_policy, isAuthPassword (bool), isAuthCookie (bool),
-          orListenAddr, *address, *fingerprint, *flags, pid, start_time,
-          *fd_used, fd_limit, isFdLimitEstimate
-    sys/  hostname, os, version
-    stat/ *%torCpu, *%armCpu, *rss, *%mem
-
-  * volatile parameter that'll be reset on each update
-  """
-
-  def __init__(self, stdscr, start_time):
-    panel.Panel.__init__(self, stdscr, "header", 0)
-    threading.Thread.__init__(self)
-    self.setDaemon(True)
-
-    self._is_tor_connected = torTools.get_conn().is_alive()
-    self._last_update = -1       # time the content was last revised
-    self._halt = False           # terminates thread if true
-    self._cond = threading.Condition()  # used for pausing the thread
-
-    # Time when the panel was paused or tor was stopped. This is used to
-    # freeze the uptime statistic (uptime increments normally when None).
-
-    self._halt_time = None
-
-    # The last arm cpu usage sampling taken. This is a tuple of the form:
-    # (total arm cpu time, sampling timestamp)
-    #
-    # The initial cpu total should be zero. However, at startup the cpu time
-    # in practice is often greater than the real time causing the initially
-    # reported cpu usage to be over 100% (which shouldn't be possible on
-    # single core systems).
-    #
-    # Setting the initial cpu total to the value at this panel's init tends to
-    # give smoother results (staying in the same ballpark as the second
-    # sampling) so fudging the numbers this way for now.
-
-    self._arm_cpu_sampling = (sum(os.times()[:3]), start_time)
-
-    # Last sampling received from the ResourceTracker, used to detect when it
-    # changes.
-
-    self._last_resource_fetch = -1
-
-    # flag to indicate if we've already given file descriptor warnings
-
-    self._is_fd_sixty_percent_warned = False
-    self._is_fd_ninety_percent_warned = False
-
-    self.vals = {}
-    self.vals_lock = threading.RLock()
-    self._update(True)
-
-    # listens for tor reload (sighup) events
-
-    torTools.get_conn().add_status_listener(self.reset_listener)
-
-  def get_height(self):
-    """
-    Provides the height of the content, which is dynamically determined by the
-    panel's maximum width.
-    """
-
-    is_wide = self.get_parent().getmaxyx()[1] >= MIN_DUAL_COL_WIDTH
-
-    if self.vals["tor/or_port"]:
-      return 4 if is_wide else 6
-    else:
-      return 3 if is_wide else 4
-
-  def send_newnym(self):
-    """
-    Requests a new identity and provides a visual cue.
-    """
-
-    torTools.get_conn().send_newnym()
-
-    # If we're wide then the newnym label in this panel will give an
-    # indication that the signal was sent. Otherwise use a msg.
-
-    is_wide = self.get_parent().getmaxyx()[1] >= MIN_DUAL_COL_WIDTH
-
-    if not is_wide:
-      arm.popups.show_msg("Requesting a new identity", 1)
-
-  def handle_key(self, key):
-    is_keystroke_consumed = True
-
-    if key in (ord('n'), ord('N')) and torTools.get_conn().is_newnym_available():
-      self.send_newnym()
-    elif key in (ord('r'), ord('R')) and not self._is_tor_connected:
-      #oldSocket = torTools.get_conn().get_controller().get_socket()
-      #
-      #controller = None
-      #allowPortConnection, allowSocketConnection, _ = starter.allowConnectionTypes()
-      #
-      #if os.path.exists(CONFIG["startup.interface.socket"]) and allowSocketConnection:
-      #  try:
-      #    # TODO: um... what about passwords?
-      #    controller = Controller.from_socket_file(CONFIG["startup.interface.socket"])
-      #    controller.authenticate()
-      #  except (IOError, stem.SocketError), exc:
-      #    controller = None
-      #
-      #    if not allowPortConnection:
-      #      arm.popups.show_msg("Unable to reconnect (%s)" % exc, 3)
-      #elif not allowPortConnection:
-      #  arm.popups.show_msg("Unable to reconnect (socket '%s' doesn't exist)" % CONFIG["startup.interface.socket"], 3)
-      #
-      #if not controller and allowPortConnection:
-      #  # TODO: This has diverged from starter.py's connection, for instance it
-      #  # doesn't account for relative cookie paths or multiple authentication
-      #  # methods. We can't use the starter.py's connection function directly
-      #  # due to password prompts, but we could certainly make this mess more
-      #  # manageable.
-      #
-      #  try:
-      #    ctlAddr, ctl_port = CONFIG["startup.interface.ip_address"], CONFIG["startup.interface.port"]
-      #    controller = Controller.from_port(ctlAddr, ctl_port)
-      #
-      #    try:
-      #      controller.authenticate()
-      #    except stem.connection.MissingPassword:
-      #      controller.authenticate(authValue) # already got the password above
-      #  except Exception, exc:
-      #    controller = None
-      #
-      #if controller:
-      #  torTools.get_conn().init(controller)
-      #  log.notice("Reconnected to Tor's control port")
-      #  arm.popups.show_msg("Tor reconnected", 1)
-
-      pass
-    else:
-      is_keystroke_consumed = False
-
-    return is_keystroke_consumed
-
-  def draw(self, width, height):
-    self.vals_lock.acquire()
-    is_wide = width + 1 >= MIN_DUAL_COL_WIDTH
-
-    # space available for content
-
-    if is_wide:
-      left_width = max(width / 2, 77)
-      right_width = width - left_width
-    else:
-      left_width = right_width = width
-
-    # Line 1 / Line 1 Left (system and tor version information)
-
-    sys_name_label = "arm - %s" % self.vals["sys/hostname"]
-    content_space = min(left_width, 40)
-
-    if len(sys_name_label) + 10 <= content_space:
-      sys_type_label = "%s %s" % (self.vals["sys/os"], self.vals["sys/version"])
-      sys_type_label = uiTools.crop_str(sys_type_label, content_space - len(sys_name_label) - 3, 4)
-      self.addstr(0, 0, "%s (%s)" % (sys_name_label, sys_type_label))
-    else:
-      self.addstr(0, 0, uiTools.crop_str(sys_name_label, content_space))
-
-    content_space = left_width - 43
-
-    if 7 + len(self.vals["tor/version"]) + len(self.vals["tor/versionStatus"]) <= content_space:
-      if self.vals["tor/version"] != "Unknown":
-        version_color = VERSION_STATUS_COLORS[self.vals["tor/versionStatus"]] if self.vals["tor/versionStatus"] in VERSION_STATUS_COLORS else "white"
-
-        label_prefix = "Tor %s (" % self.vals["tor/version"]
-        self.addstr(0, 43, label_prefix)
-        self.addstr(0, 43 + len(label_prefix), self.vals["tor/versionStatus"], uiTools.get_color(version_color))
-        self.addstr(0, 43 + len(label_prefix) + len(self.vals["tor/versionStatus"]), ")")
-    elif 11 <= content_space:
-      self.addstr(0, 43, uiTools.crop_str("Tor %s" % self.vals["tor/version"], content_space, 4))
-
-    # Line 2 / Line 2 Left (tor ip/port information)
-
-    x, include_control_port = 0, True
-
-    if self.vals["tor/or_port"]:
-      my_address = "Unknown"
-
-      if self.vals["tor/orListenAddr"]:
-        my_address = self.vals["tor/orListenAddr"]
-      elif self.vals["tor/address"]:
-        my_address = self.vals["tor/address"]
-
-      # acting as a relay (we can assume certain parameters are set)
-
-      dir_port_label = ", Dir Port: %s" % self.vals["tor/dir_port"] if self.vals["tor/dir_port"] != "0" else ""
-
-      for label in (self.vals["tor/nickname"], " - " + my_address, ":" + self.vals["tor/or_port"], dir_port_label):
-        if x + len(label) <= left_width:
-          self.addstr(1, x, label)
-          x += len(label)
-        else:
-          break
-    else:
-      # non-relay (client only)
-
-      if self._is_tor_connected:
-        self.addstr(1, x, "Relaying Disabled", uiTools.get_color("cyan"))
-        x += 17
-      else:
-        status_time = torTools.get_conn().controller.get_latest_heartbeat()
-
-        if status_time:
-          status_time_label = time.strftime("%H:%M %m/%d/%Y, ", time.localtime(status_time))
-        else:
-          status_time_label = ""  # never connected to tor
-
-        self.addstr(1, x, "Tor Disconnected", curses.A_BOLD | uiTools.get_color("red"))
-        self.addstr(1, x + 16, " (%spress r to reconnect)" % status_time_label)
-        x += 39 + len(status_time_label)
-        include_control_port = False
-
-    if include_control_port:
-      if self.vals["tor/control_port"] == "0":
-        # connected via a control socket
-        self.addstr(1, x, ", Control Socket: %s" % self.vals["tor/socketPath"])
-      else:
-        if self.vals["tor/isAuthPassword"]:
-          auth_type = "password"
-        elif self.vals["tor/isAuthCookie"]:
-          auth_type = "cookie"
-        else:
-          auth_type = "open"
-
-        if x + 19 + len(self.vals["tor/control_port"]) + len(auth_type) <= left_width:
-          auth_color = "red" if auth_type == "open" else "green"
-          self.addstr(1, x, ", Control Port (")
-          self.addstr(1, x + 16, auth_type, uiTools.get_color(auth_color))
-          self.addstr(1, x + 16 + len(auth_type), "): %s" % self.vals["tor/control_port"])
-        elif x + 16 + len(self.vals["tor/control_port"]) <= left_width:
-          self.addstr(1, 0, ", Control Port: %s" % self.vals["tor/control_port"])
-
-    # Line 3 / Line 1 Right (system usage info)
-
-    y, x = (0, left_width) if is_wide else (2, 0)
-
-    if self.vals["stat/rss"] != "0":
-      memory_label = str_tools.get_size_label(int(self.vals["stat/rss"]))
-    else:
-      memory_label = "0"
-
-    uptime_label = ""
-
-    if self.vals["tor/start_time"]:
-      if self.is_paused() or not self._is_tor_connected:
-        # freeze the uptime when paused or the tor process is stopped
-        uptime_label = str_tools.get_short_time_label(self.get_pause_time() - self.vals["tor/start_time"])
-      else:
-        uptime_label = str_tools.get_short_time_label(time.time() - self.vals["tor/start_time"])
-
-    sys_fields = ((0, "cpu: %s%% tor, %s%% arm" % (self.vals["stat/%torCpu"], self.vals["stat/%armCpu"])),
-                  (27, "mem: %s (%s%%)" % (memory_label, self.vals["stat/%mem"])),
-                  (47, "pid: %s" % (self.vals["tor/pid"] if self._is_tor_connected else "")),
-                  (59, "uptime: %s" % uptime_label))
-
-    for (start, label) in sys_fields:
-      if start + len(label) <= right_width:
-        self.addstr(y, x + start, label)
-      else:
-        break
-
-    if self.vals["tor/or_port"]:
-      # Line 4 / Line 2 Right (fingerprint, and possibly file descriptor usage)
-
-      y, x = (1, left_width) if is_wide else (3, 0)
-
-      fingerprint_label = uiTools.crop_str("fingerprint: %s" % self.vals["tor/fingerprint"], width)
-      self.addstr(y, x, fingerprint_label)
-
-      # if there's room and we're able to retrieve both the file descriptor
-      # usage and limit then it might be presented
-
-      if width - x - 59 >= 20 and self.vals["tor/fd_used"] and self.vals["tor/fd_limit"]:
-        # display file descriptor usage if we're either configured to do so or
-        # running out
-
-        fd_percent = 100 * self.vals["tor/fd_used"] / self.vals["tor/fd_limit"]
-
-        if fd_percent >= 60 or CONFIG["features.showFdUsage"]:
-          fd_percentLabel, fd_percent_format = "%i%%" % fd_percent, curses.A_NORMAL
-
-          if fd_percent >= 95:
-            fd_percent_format = curses.A_BOLD | uiTools.get_color("red")
-          elif fd_percent >= 90:
-            fd_percent_format = uiTools.get_color("red")
-          elif fd_percent >= 60:
-            fd_percent_format = uiTools.get_color("yellow")
-
-          estimate_char = "?" if self.vals["tor/isFdLimitEstimate"] else ""
-          base_label = "file desc: %i / %i%s (" % (self.vals["tor/fd_used"], self.vals["tor/fd_limit"], estimate_char)
-
-          self.addstr(y, x + 59, base_label)
-          self.addstr(y, x + 59 + len(base_label), fd_percentLabel, fd_percent_format)
-          self.addstr(y, x + 59 + len(base_label) + len(fd_percentLabel), ")")
-
-      # Line 5 / Line 3 Left (flags)
-
-      if self._is_tor_connected:
-        y, x = (2 if is_wide else 4, 0)
-        self.addstr(y, x, "flags: ")
-        x += 7
-
-        if len(self.vals["tor/flags"]) > 0:
-          for i in range(len(self.vals["tor/flags"])):
-            flag = self.vals["tor/flags"][i]
-            flag_color = FLAG_COLORS[flag] if flag in FLAG_COLORS.keys() else "white"
-
-            self.addstr(y, x, flag, curses.A_BOLD | uiTools.get_color(flag_color))
-            x += len(flag)
-
-            if i < len(self.vals["tor/flags"]) - 1:
-              self.addstr(y, x, ", ")
-              x += 2
-        else:
-          self.addstr(y, x, "none", curses.A_BOLD | uiTools.get_color("cyan"))
-      else:
-        y = 2 if is_wide else 4
-        status_time = torTools.get_conn().controller.get_latest_heartbeat()
-        status_time_label = time.strftime("%H:%M %m/%d/%Y", time.localtime(status_time))
-        self.addstr(y, 0, "Tor Disconnected", curses.A_BOLD | uiTools.get_color("red"))
-        self.addstr(y, 16, " (%s) - press r to reconnect" % status_time_label)
-
-      # Undisplayed / Line 3 Right (exit policy)
-
-      if is_wide:
-        exit_policy = self.vals["tor/exit_policy"]
-
-        # adds note when default exit policy is appended
-
-        if exit_policy == "":
-          exit_policy = "<default>"
-        elif not exit_policy.endswith((" *:*", " *")):
-          exit_policy += ", <default>"
-
-        self.addstr(2, left_width, "exit policy: ")
-        x = left_width + 13
-
-        # color codes accept entries green, reject entries red, and the default marker cyan
-
-        is_simple = len(exit_policy) > right_width - 13
-        policies = exit_policy.split(", ")
-
-        for i in range(len(policies)):
-          policy = policies[i].strip()
-          policy_label = policy.replace("accept", "").replace("reject", "").strip() if is_simple else policy
-
-          policy_color = "white"
-
-          if policy.startswith("accept"):
-            policy_color = "green"
-          elif policy.startswith("reject"):
-            policy_color = "red"
-          elif policy.startswith("<default>"):
-            policy_color = "cyan"
-
-          self.addstr(2, x, policy_label, curses.A_BOLD | uiTools.get_color(policy_color))
-          x += len(policy_label)
-
-          if i < len(policies) - 1:
-            self.addstr(2, x, ", ")
-            x += 2
-    else:
-      # (Client only) Undisplayed / Line 2 Right (new identity option)
-
-      if is_wide:
-        conn = torTools.get_conn()
-        newnym_wait = conn.get_newnym_wait()
-
-        msg = "press 'n' for a new identity"
-
-        if newnym_wait > 0:
-          plural_label = "s" if newnym_wait > 1 else ""
-          msg = "building circuits, available again in %i second%s" % (newnym_wait, plural_label)
-
-        self.addstr(1, left_width, msg)
-
-    self.vals_lock.release()
-
-  def get_pause_time(self):
-    """
-    Provides the time Tor stopped if it isn't running. Otherwise this is the
-    time we were last paused.
-    """
-
-    if self._halt_time:
-      return self._halt_time
-    else:
-      return panel.Panel.get_pause_time(self)
-
-  def run(self):
-    """
-    Keeps stats updated, checking for new information at a set rate.
-    """
-
-    last_draw = time.time() - 1
-
-    while not self._halt:
-      current_time = time.time()
-
-      if self.is_paused() or current_time - last_draw < 1 or not self._is_tor_connected:
-        self._cond.acquire()
-
-        if not self._halt:
-          self._cond.wait(0.2)
-
-        self._cond.release()
-      else:
-        # Update the volatile attributes (cpu, memory, flags, etc) if we have
-        # a new resource usage sampling (the most dynamic stat) or it's been
-        # twenty seconds since last fetched (so we still refresh occasionally
-        # when resource fetches fail).
-        #
-        # Otherwise, just redraw the panel to change the uptime field.
-
-        is_changed = False
-
-        if self.vals["tor/pid"]:
-          resource_tracker = arm.util.tracker.get_resource_tracker()
-          is_changed = self._last_resource_fetch != resource_tracker.run_counter()
-
-        if is_changed or current_time - self._last_update >= 20:
-          self._update()
-
-        self.redraw(True)
-        last_draw += 1
-
-  def stop(self):
-    """
-    Halts further resolutions and terminates the thread.
-    """
-
-    self._cond.acquire()
-    self._halt = True
-    self._cond.notifyAll()
-    self._cond.release()
-
-  def reset_listener(self, controller, event_type, _):
-    """
-    Updates static parameters on tor reload (sighup) events.
-    """
-
-    if event_type in (State.INIT, State.RESET):
-      initial_height = self.get_height()
-      self._is_tor_connected = True
-      self._halt_time = None
-      self._update(True)
-
-      if self.get_height() != initial_height:
-        # We're toggling between being a relay and client, causing the height
-        # of this panel to change. Redraw all content so we don't get
-        # overlapping content.
-
-        arm.controller.get_controller().redraw()
-      else:
-        # just need to redraw ourselves
-        self.redraw(True)
-    elif event_type == State.CLOSED:
-      self._is_tor_connected = False
-      self._halt_time = time.time()
-      self._update()
-      self.redraw(True)
-
-  def _update(self, set_static=False):
-    """
-    Updates stats in the vals mapping. By default this just revises volatile
-    attributes.
-
-    Arguments:
-      set_static - resets all parameters, including relatively static values
-    """
-
-    self.vals_lock.acquire()
-    conn = torTools.get_conn()
-
-    if set_static:
-      # version is truncated to first part, for instance:
-      # 0.2.2.13-alpha (git-feb8c1b5f67f2c6f) -> 0.2.2.13-alpha
-
-      self.vals["tor/version"] = conn.get_info("version", "Unknown").split()[0]
-      self.vals["tor/versionStatus"] = conn.get_info("status/version/current", "Unknown")
-      self.vals["tor/nickname"] = conn.get_option("Nickname", "")
-      self.vals["tor/or_port"] = conn.get_option("ORPort", "0")
-      self.vals["tor/dir_port"] = conn.get_option("DirPort", "0")
-      self.vals["tor/control_port"] = conn.get_option("ControlPort", "0")
-      self.vals["tor/socketPath"] = conn.get_option("ControlSocket", "")
-      self.vals["tor/isAuthPassword"] = conn.get_option("HashedControlPassword", None) is not None
-      self.vals["tor/isAuthCookie"] = conn.get_option("CookieAuthentication", None) == "1"
-
-      # orport is reported as zero if unset
-
-      if self.vals["tor/or_port"] == "0":
-        self.vals["tor/or_port"] = ""
-
-      # overwrite address if ORListenAddress is set (and possibly or_port too)
-
-      self.vals["tor/orListenAddr"] = ""
-      listen_addr = conn.get_option("ORListenAddress", None)
-
-      if listen_addr:
-        if ":" in listen_addr:
-          # both ip and port overwritten
-          self.vals["tor/orListenAddr"] = listen_addr[:listen_addr.find(":")]
-          self.vals["tor/or_port"] = listen_addr[listen_addr.find(":") + 1:]
-        else:
-          self.vals["tor/orListenAddr"] = listen_addr
-
-      # fetch exit policy (might span over multiple lines)
-
-      policy_entries = []
-
-      for exit_policy in conn.get_option("ExitPolicy", [], True):
-        policy_entries += [policy.strip() for policy in exit_policy.split(",")]
-
-      self.vals["tor/exit_policy"] = ", ".join(policy_entries)
-
-      # file descriptor limit for the process, if this can't be determined
-      # then the limit is None
-
-      fd_limit, fd_is_estimate = conn.get_my_file_descriptor_limit()
-      self.vals["tor/fd_limit"] = fd_limit
-      self.vals["tor/isFdLimitEstimate"] = fd_is_estimate
-
-      # system information
-
-      uname_vals = os.uname()
-      self.vals["sys/hostname"] = uname_vals[1]
-      self.vals["sys/os"] = uname_vals[0]
-      self.vals["sys/version"] = uname_vals[2]
-
-      self.vals["tor/pid"] = conn.controller.get_pid("")
-
-      start_time = conn.get_start_time()
-      self.vals["tor/start_time"] = start_time if start_time else ""
-
-      # reverts volatile parameters to defaults
-
-      self.vals["tor/fingerprint"] = "Unknown"
-      self.vals["tor/flags"] = []
-      self.vals["tor/fd_used"] = 0
-      self.vals["stat/%torCpu"] = "0"
-      self.vals["stat/%armCpu"] = "0"
-      self.vals["stat/rss"] = "0"
-      self.vals["stat/%mem"] = "0"
-
-    # sets volatile parameters
-    # TODO: This can change, being reported by STATUS_SERVER -> EXTERNAL_ADDRESS
-    # events. Introduce caching via torTools?
-
-    self.vals["tor/address"] = conn.get_info("address", "")
-
-    self.vals["tor/fingerprint"] = conn.get_info("fingerprint", self.vals["tor/fingerprint"])
-    self.vals["tor/flags"] = conn.get_my_flags(self.vals["tor/flags"])
-
-    # Updates file descriptor usage and logs if the usage is high. If we don't
-    # have a known limit or it's obviously faulty (being lower than our
-    # current usage) then omit file descriptor functionality.
-
-    if self.vals["tor/fd_limit"]:
-      fd_used = conn.get_my_file_descriptor_usage()
-
-      if fd_used and fd_used <= self.vals["tor/fd_limit"]:
-        self.vals["tor/fd_used"] = fd_used
-      else:
-        self.vals["tor/fd_used"] = 0
-
-    if self.vals["tor/fd_used"] and self.vals["tor/fd_limit"]:
-      fd_percent = 100 * self.vals["tor/fd_used"] / self.vals["tor/fd_limit"]
-      estimated_label = " estimated" if self.vals["tor/isFdLimitEstimate"] else ""
-      msg = "Tor's%s file descriptor usage is at %i%%." % (estimated_label, fd_percent)
-
-      if fd_percent >= 90 and not self._is_fd_ninety_percent_warned:
-        self._is_fd_sixty_percent_warned, self._is_fd_ninety_percent_warned = True, True
-        msg += " If you run out Tor will be unable to continue functioning."
-        log.warn(msg)
-      elif fd_percent >= 60 and not self._is_fd_sixty_percent_warned:
-        self._is_fd_sixty_percent_warned = True
-        log.notice(msg)
-
-    # ps or proc derived resource usage stats
-
-    if self.vals["tor/pid"]:
-      resource_tracker = arm.util.tracker.get_resource_tracker()
-
-      resources = resource_tracker.get_resource_usage()
-      self._last_resource_fetch = resource_tracker.run_counter()
-      self.vals["stat/%torCpu"] = "%0.1f" % (100 * resources.cpu_sample)
-      self.vals["stat/rss"] = str(resources.memory_bytes)
-      self.vals["stat/%mem"] = "%0.1f" % (100 * resources.memory_percent)
-
-    # determines the cpu time for the arm process (including user and system
-    # time of both the primary and child processes)
-
-    total_arm_cpu_time, current_time = sum(os.times()[:3]), time.time()
-    arm_cpu_delta = total_arm_cpu_time - self._arm_cpu_sampling[0]
-    arm_time_delta = current_time - self._arm_cpu_sampling[1]
-    python_cpu_time = arm_cpu_delta / arm_time_delta
-    sys_call_cpu_time = 0.0  # TODO: add a wrapper around call() to get this
-    self.vals["stat/%armCpu"] = "%0.1f" % (100 * (python_cpu_time + sys_call_cpu_time))
-    self._arm_cpu_sampling = (total_arm_cpu_time, current_time)
-
-    self._last_update = current_time
-    self.vals_lock.release()
diff --git a/arm/header_panel.py b/arm/header_panel.py
new file mode 100644
index 0000000..09c12a8
--- /dev/null
+++ b/arm/header_panel.py
@@ -0,0 +1,679 @@
+"""
+Top panel for every page, containing basic system and tor related information.
+If there's room available then this expands to present its information in two
+columns, otherwise it's laid out as follows:
+  arm - <hostname> (<os> <sys/version>)         Tor <tor/version> (<new, old, recommended, etc>)
+  <nickname> - <address>:<or_port>, [Dir Port: <dir_port>, ]Control Port (<open, password, cookie>): <control_port>
+  cpu: <cpu%> mem: <mem> (<mem%>) pid: <pid> uptime: <upmin>:<upsec>
+  fingerprint: <fingerprint>
+
+Example:
+  arm - odin (Linux 2.6.24-24-generic)         Tor 0.2.1.19 (recommended)
+  odin - 76.104.132.98:9001, Dir Port: 9030, Control Port (cookie): 9051
+  cpu: 14.6%    mem: 42 MB (4.2%)    pid: 20060   uptime: 48:27
+  fingerprint: BDAD31F6F318E0413833E8EBDA956F76E4D66788
+"""
+
+import os
+import time
+import curses
+import threading
+
+import arm.util.tracker
+
+from stem.control import State
+from stem.util import conf, log, str_tools
+
+import arm.starter
+import arm.popups
+import arm.controller
+
+from util import panel, tor_tools, ui_tools
+
+# minimum width for which panel attempts to double up contents (two columns to
+# better use screen real estate)
+
+MIN_DUAL_COL_WIDTH = 141
+
+FLAG_COLORS = {
+  "Authority": "white",
+  "BadExit": "red",
+  "BadDirectory": "red",
+  "Exit": "cyan",
+  "Fast": "yellow",
+  "Guard": "green",
+  "HSDir": "magenta",
+  "Named": "blue",
+  "Stable": "blue",
+  "Running": "yellow",
+  "Unnamed": "magenta",
+  "Valid": "green",
+  "V2Dir": "cyan",
+  "V3Dir": "white",
+}
+
+VERSION_STATUS_COLORS = {
+  "new": "blue",
+  "new in series": "blue",
+  "obsolete": "red",
+  "recommended": "green",
+  "old": "red",
+  "unrecommended": "red",
+  "unknown": "cyan",
+}
+
+CONFIG = conf.config_dict("arm", {
+  "features.showFdUsage": False,
+})
+
+
+class HeaderPanel(panel.Panel, threading.Thread):
+  """
+  Top area containing tor settings and system information. Stats are stored in
+  the vals mapping, keys including:
+    tor/  version, versionStatus, nickname, or_port, dir_port, control_port,
+          socketPath, exit_policy, isAuthPassword (bool), isAuthCookie (bool),
+          orListenAddr, *address, *fingerprint, *flags, pid, start_time,
+          *fd_used, fd_limit, isFdLimitEstimate
+    sys/  hostname, os, version
+    stat/ *%torCpu, *%armCpu, *rss, *%mem
+
+  * volatile parameter that'll be reset on each update
+  """
+
+  def __init__(self, stdscr, start_time):
+    panel.Panel.__init__(self, stdscr, "header", 0)
+    threading.Thread.__init__(self)
+    self.setDaemon(True)
+
+    self._is_tor_connected = tor_tools.get_conn().is_alive()
+    self._last_update = -1       # time the content was last revised
+    self._halt = False           # terminates thread if true
+    self._cond = threading.Condition()  # used for pausing the thread
+
+    # Time when the panel was paused or tor was stopped. This is used to
+    # freeze the uptime statistic (uptime increments normally when None).
+
+    self._halt_time = None
+
+    # The last arm cpu usage sampling taken. This is a tuple of the form:
+    # (total arm cpu time, sampling timestamp)
+    #
+    # The initial cpu total should be zero. However, at startup the cpu time
+    # in practice is often greater than the real time causing the initially
+    # reported cpu usage to be over 100% (which shouldn't be possible on
+    # single core systems).
+    #
+    # Setting the initial cpu total to the value at this panel's init tends to
+    # give smoother results (staying in the same ballpark as the second
+    # sampling) so fudging the numbers this way for now.
+
+    self._arm_cpu_sampling = (sum(os.times()[:3]), start_time)
+
+    # Last sampling received from the ResourceTracker, used to detect when it
+    # changes.
+
+    self._last_resource_fetch = -1
+
+    # flag to indicate if we've already given file descriptor warnings
+
+    self._is_fd_sixty_percent_warned = False
+    self._is_fd_ninety_percent_warned = False
+
+    self.vals = {}
+    self.vals_lock = threading.RLock()
+    self._update(True)
+
+    # listens for tor reload (sighup) events
+
+    tor_tools.get_conn().add_status_listener(self.reset_listener)
+
+  def get_height(self):
+    """
+    Provides the height of the content, which is dynamically determined by the
+    panel's maximum width.
+    """
+
+    is_wide = self.get_parent().getmaxyx()[1] >= MIN_DUAL_COL_WIDTH
+
+    if self.vals["tor/or_port"]:
+      return 4 if is_wide else 6
+    else:
+      return 3 if is_wide else 4
+
+  def send_newnym(self):
+    """
+    Requests a new identity and provides a visual cue.
+    """
+
+    tor_tools.get_conn().send_newnym()
+
+    # If we're wide then the newnym label in this panel will give an
+    # indication that the signal was sent. Otherwise use a msg.
+
+    is_wide = self.get_parent().getmaxyx()[1] >= MIN_DUAL_COL_WIDTH
+
+    if not is_wide:
+      arm.popups.show_msg("Requesting a new identity", 1)
+
+  def handle_key(self, key):
+    is_keystroke_consumed = True
+
+    if key in (ord('n'), ord('N')) and tor_tools.get_conn().is_newnym_available():
+      self.send_newnym()
+    elif key in (ord('r'), ord('R')) and not self._is_tor_connected:
+      #oldSocket = tor_tools.get_conn().get_controller().get_socket()
+      #
+      #controller = None
+      #allowPortConnection, allowSocketConnection, _ = starter.allowConnectionTypes()
+      #
+      #if os.path.exists(CONFIG["startup.interface.socket"]) and allowSocketConnection:
+      #  try:
+      #    # TODO: um... what about passwords?
+      #    controller = Controller.from_socket_file(CONFIG["startup.interface.socket"])
+      #    controller.authenticate()
+      #  except (IOError, stem.SocketError), exc:
+      #    controller = None
+      #
+      #    if not allowPortConnection:
+      #      arm.popups.show_msg("Unable to reconnect (%s)" % exc, 3)
+      #elif not allowPortConnection:
+      #  arm.popups.show_msg("Unable to reconnect (socket '%s' doesn't exist)" % CONFIG["startup.interface.socket"], 3)
+      #
+      #if not controller and allowPortConnection:
+      #  # TODO: This has diverged from starter.py's connection, for instance it
+      #  # doesn't account for relative cookie paths or multiple authentication
+      #  # methods. We can't use the starter.py's connection function directly
+      #  # due to password prompts, but we could certainly make this mess more
+      #  # manageable.
+      #
+      #  try:
+      #    ctlAddr, ctl_port = CONFIG["startup.interface.ip_address"], CONFIG["startup.interface.port"]
+      #    controller = Controller.from_port(ctlAddr, ctl_port)
+      #
+      #    try:
+      #      controller.authenticate()
+      #    except stem.connection.MissingPassword:
+      #      controller.authenticate(authValue) # already got the password above
+      #  except Exception, exc:
+      #    controller = None
+      #
+      #if controller:
+      #  tor_tools.get_conn().init(controller)
+      #  log.notice("Reconnected to Tor's control port")
+      #  arm.popups.show_msg("Tor reconnected", 1)
+
+      pass
+    else:
+      is_keystroke_consumed = False
+
+    return is_keystroke_consumed
+
+  def draw(self, width, height):
+    self.vals_lock.acquire()
+    is_wide = width + 1 >= MIN_DUAL_COL_WIDTH
+
+    # space available for content
+
+    if is_wide:
+      left_width = max(width / 2, 77)
+      right_width = width - left_width
+    else:
+      left_width = right_width = width
+
+    # Line 1 / Line 1 Left (system and tor version information)
+
+    sys_name_label = "arm - %s" % self.vals["sys/hostname"]
+    content_space = min(left_width, 40)
+
+    if len(sys_name_label) + 10 <= content_space:
+      sys_type_label = "%s %s" % (self.vals["sys/os"], self.vals["sys/version"])
+      sys_type_label = ui_tools.crop_str(sys_type_label, content_space - len(sys_name_label) - 3, 4)
+      self.addstr(0, 0, "%s (%s)" % (sys_name_label, sys_type_label))
+    else:
+      self.addstr(0, 0, ui_tools.crop_str(sys_name_label, content_space))
+
+    content_space = left_width - 43
+
+    if 7 + len(self.vals["tor/version"]) + len(self.vals["tor/versionStatus"]) <= content_space:
+      if self.vals["tor/version"] != "Unknown":
+        version_color = VERSION_STATUS_COLORS[self.vals["tor/versionStatus"]] if self.vals["tor/versionStatus"] in VERSION_STATUS_COLORS else "white"
+
+        label_prefix = "Tor %s (" % self.vals["tor/version"]
+        self.addstr(0, 43, label_prefix)
+        self.addstr(0, 43 + len(label_prefix), self.vals["tor/versionStatus"], ui_tools.get_color(version_color))
+        self.addstr(0, 43 + len(label_prefix) + len(self.vals["tor/versionStatus"]), ")")
+    elif 11 <= content_space:
+      self.addstr(0, 43, ui_tools.crop_str("Tor %s" % self.vals["tor/version"], content_space, 4))
+
+    # Line 2 / Line 2 Left (tor ip/port information)
+
+    x, include_control_port = 0, True
+
+    if self.vals["tor/or_port"]:
+      my_address = "Unknown"
+
+      if self.vals["tor/orListenAddr"]:
+        my_address = self.vals["tor/orListenAddr"]
+      elif self.vals["tor/address"]:
+        my_address = self.vals["tor/address"]
+
+      # acting as a relay (we can assume certain parameters are set)
+
+      dir_port_label = ", Dir Port: %s" % self.vals["tor/dir_port"] if self.vals["tor/dir_port"] != "0" else ""
+
+      for label in (self.vals["tor/nickname"], " - " + my_address, ":" + self.vals["tor/or_port"], dir_port_label):
+        if x + len(label) <= left_width:
+          self.addstr(1, x, label)
+          x += len(label)
+        else:
+          break
+    else:
+      # non-relay (client only)
+
+      if self._is_tor_connected:
+        self.addstr(1, x, "Relaying Disabled", ui_tools.get_color("cyan"))
+        x += 17
+      else:
+        status_time = tor_tools.get_conn().controller.get_latest_heartbeat()
+
+        if status_time:
+          status_time_label = time.strftime("%H:%M %m/%d/%Y, ", time.localtime(status_time))
+        else:
+          status_time_label = ""  # never connected to tor
+
+        self.addstr(1, x, "Tor Disconnected", curses.A_BOLD | ui_tools.get_color("red"))
+        self.addstr(1, x + 16, " (%spress r to reconnect)" % status_time_label)
+        x += 39 + len(status_time_label)
+        include_control_port = False
+
+    if include_control_port:
+      if self.vals["tor/control_port"] == "0":
+        # connected via a control socket
+        self.addstr(1, x, ", Control Socket: %s" % self.vals["tor/socketPath"])
+      else:
+        if self.vals["tor/isAuthPassword"]:
+          auth_type = "password"
+        elif self.vals["tor/isAuthCookie"]:
+          auth_type = "cookie"
+        else:
+          auth_type = "open"
+
+        if x + 19 + len(self.vals["tor/control_port"]) + len(auth_type) <= left_width:
+          auth_color = "red" if auth_type == "open" else "green"
+          self.addstr(1, x, ", Control Port (")
+          self.addstr(1, x + 16, auth_type, ui_tools.get_color(auth_color))
+          self.addstr(1, x + 16 + len(auth_type), "): %s" % self.vals["tor/control_port"])
+        elif x + 16 + len(self.vals["tor/control_port"]) <= left_width:
+          self.addstr(1, x, ", Control Port: %s" % self.vals["tor/control_port"])
+
+    # Line 3 / Line 1 Right (system usage info)
+
+    y, x = (0, left_width) if is_wide else (2, 0)
+
+    if self.vals["stat/rss"] != "0":
+      memory_label = str_tools.get_size_label(int(self.vals["stat/rss"]))
+    else:
+      memory_label = "0"
+
+    uptime_label = ""
+
+    if self.vals["tor/start_time"]:
+      if self.is_paused() or not self._is_tor_connected:
+        # freeze the uptime when paused or the tor process is stopped
+        uptime_label = str_tools.get_short_time_label(self.get_pause_time() - self.vals["tor/start_time"])
+      else:
+        uptime_label = str_tools.get_short_time_label(time.time() - self.vals["tor/start_time"])
+
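+    # (column offset, label) pairs for the system stats, drawn until we run out of room in the column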
+    sys_fields = ((0, "cpu: %s%% tor, %s%% arm" % (self.vals["stat/%torCpu"], self.vals["stat/%armCpu"])),
+                  (27, "mem: %s (%s%%)" % (memory_label, self.vals["stat/%mem"])),
+                  (47, "pid: %s" % (self.vals["tor/pid"] if self._is_tor_connected else "")),
+                  (59, "uptime: %s" % uptime_label))
+
+    for (start, label) in sys_fields:
+      if start + len(label) <= right_width:
+        self.addstr(y, x + start, label)
+      else:
+        break
+
+    if self.vals["tor/or_port"]:
+      # Line 4 / Line 2 Right (fingerprint, and possibly file descriptor usage)
+
+      y, x = (1, left_width) if is_wide else (3, 0)
+
+      fingerprint_label = ui_tools.crop_str("fingerprint: %s" % self.vals["tor/fingerprint"], width)
+      self.addstr(y, x, fingerprint_label)
+
+      # if there's room and we're able to retrieve both the file descriptor
+      # usage and limit then it might be presented
+
+      if width - x - 59 >= 20 and self.vals["tor/fd_used"] and self.vals["tor/fd_limit"]:
+        # display file descriptor usage if we're either configured to do so or
+        # running out
+
+        fd_percent = 100 * self.vals["tor/fd_used"] / self.vals["tor/fd_limit"]
+
+        if fd_percent >= 60 or CONFIG["features.showFdUsage"]:
+          fd_percent_label, fd_percent_format = "%i%%" % fd_percent, curses.A_NORMAL
+
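+          # escalating attention as usage climbs: yellow at 60%, red at 90%, bold red at 95%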
+          if fd_percent >= 95:
+            fd_percent_format = curses.A_BOLD | ui_tools.get_color("red")
+          elif fd_percent >= 90:
+            fd_percent_format = ui_tools.get_color("red")
+          elif fd_percent >= 60:
+            fd_percent_format = ui_tools.get_color("yellow")
+
+          estimate_char = "?" if self.vals["tor/isFdLimitEstimate"] else ""
+          base_label = "file desc: %i / %i%s (" % (self.vals["tor/fd_used"], self.vals["tor/fd_limit"], estimate_char)
+
+          self.addstr(y, x + 59, base_label)
+          self.addstr(y, x + 59 + len(base_label), fd_percent_label, fd_percent_format)
+          self.addstr(y, x + 59 + len(base_label) + len(fd_percent_label), ")")
+
+      # Line 5 / Line 3 Left (flags)
+
+      if self._is_tor_connected:
+        y, x = (2 if is_wide else 4, 0)
+        self.addstr(y, x, "flags: ")
+        x += 7
+
+        if len(self.vals["tor/flags"]) > 0:
+          for i in range(len(self.vals["tor/flags"])):
+            flag = self.vals["tor/flags"][i]
+            flag_color = FLAG_COLORS[flag] if flag in FLAG_COLORS else "white"
+
+            self.addstr(y, x, flag, curses.A_BOLD | ui_tools.get_color(flag_color))
+            x += len(flag)
+
+            if i < len(self.vals["tor/flags"]) - 1:
+              self.addstr(y, x, ", ")
+              x += 2
+        else:
+          self.addstr(y, x, "none", curses.A_BOLD | ui_tools.get_color("cyan"))
+      else:
+        y = 2 if is_wide else 4
+        status_time = tor_tools.get_conn().controller.get_latest_heartbeat()
+        status_time_label = time.strftime("%H:%M %m/%d/%Y", time.localtime(status_time))
+        self.addstr(y, 0, "Tor Disconnected", curses.A_BOLD | ui_tools.get_color("red"))
+        self.addstr(y, 16, " (%s) - press r to reconnect" % status_time_label)
+
+      # Undisplayed / Line 3 Right (exit policy)
+
+      if is_wide:
+        exit_policy = self.vals["tor/exit_policy"]
+
+        # adds note when default exit policy is appended
+
+        if exit_policy == "":
+          exit_policy = "<default>"
+        elif not exit_policy.endswith((" *:*", " *")):
+          exit_policy += ", <default>"
+
+        self.addstr(2, left_width, "exit policy: ")
+        x = left_width + 13
+
+        # color codes accepts as green, rejects as red, and the default marker as cyan
+
+        is_simple = len(exit_policy) > right_width - 13
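+        # when the full policy is too long to fit we just show the addresses, dropping the accept/reject labels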
+        policies = exit_policy.split(", ")
+
+        for i in range(len(policies)):
+          policy = policies[i].strip()
+          policy_label = policy.replace("accept", "").replace("reject", "").strip() if is_simple else policy
+
+          policy_color = "white"
+
+          if policy.startswith("accept"):
+            policy_color = "green"
+          elif policy.startswith("reject"):
+            policy_color = "red"
+          elif policy.startswith("<default>"):
+            policy_color = "cyan"
+
+          self.addstr(2, x, policy_label, curses.A_BOLD | ui_tools.get_color(policy_color))
+          x += len(policy_label)
+
+          if i < len(policies) - 1:
+            self.addstr(2, x, ", ")
+            x += 2
+    else:
+      # (Client only) Undisplayed / Line 2 Right (new identity option)
+
+      if is_wide:
+        conn = tor_tools.get_conn()
+        newnym_wait = conn.get_newnym_wait()
+
+        msg = "press 'n' for a new identity"
+
+        if newnym_wait > 0:
+          plural_label = "s" if newnym_wait > 1 else ""
+          msg = "building circuits, available again in %i second%s" % (newnym_wait, plural_label)
+
+        self.addstr(1, left_width, msg)
+
+    self.vals_lock.release()
+
+  def get_pause_time(self):
+    """
+    Provides the time Tor stopped if it isn't running. Otherwise this is the
+    time we were last paused.
+    """
+
+    if self._halt_time:
+      return self._halt_time
+    else:
+      return panel.Panel.get_pause_time(self)
+
+  def run(self):
+    """
+    Keeps stats updated, checking for new information at a set rate.
+    """
+
+    last_draw = time.time() - 1
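+    # pretends our last draw was a second ago so the first pass redraws immediately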
+
+    while not self._halt:
+      current_time = time.time()
+
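+      # redraws at most once per second, and not at all while paused or disconnected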
+      if self.is_paused() or current_time - last_draw < 1 or not self._is_tor_connected:
+        self._cond.acquire()
+
+        if not self._halt:
+          self._cond.wait(0.2)
+
+        self._cond.release()
+      else:
+        # Update the volatile attributes (cpu, memory, flags, etc) if we have
+        # a new resource usage sampling (the most dynamic stat) or it's been
+        # twenty seconds since we last fetched (so we still refresh occasionally
+        # when resource fetches fail).
+        #
+        # Otherwise, just redraw the panel to change the uptime field.
+
+        is_changed = False
+
+        if self.vals["tor/pid"]:
+          resource_tracker = arm.util.tracker.get_resource_tracker()
+          is_changed = self._last_resource_fetch != resource_tracker.run_counter()
+
+        if is_changed or current_time - self._last_update >= 20:
+          self._update()
+
+        self.redraw(True)
+        last_draw += 1
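+        # steps the timestamp forward a second at a time rather than resetting it to the current time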
+
+  def stop(self):
+    """
+    Halts further resolutions and terminates the thread.
+    """
+
+    self._cond.acquire()
+    self._halt = True
+    self._cond.notifyAll()
+    self._cond.release()
+
+  def reset_listener(self, controller, event_type, _):
+    """
+    Updates static parameters on tor reload (sighup) events.
+    """
+
+    if event_type in (State.INIT, State.RESET):
+      initial_height = self.get_height()
+      self._is_tor_connected = True
+      self._halt_time = None
+      self._update(True)
+
+      if self.get_height() != initial_height:
+        # We're toggling between being a relay and client, causing the height
+        # of this panel to change. Redraw all content so we don't get
+        # overlapping content.
+
+        arm.controller.get_controller().redraw()
+      else:
+        # just need to redraw ourselves
+        self.redraw(True)
+    elif event_type == State.CLOSED:
+      self._is_tor_connected = False
+      self._halt_time = time.time()
+      self._update()
+      self.redraw(True)
+
+  def _update(self, set_static=False):
+    """
+    Updates stats in the vals mapping. By default this just revises volatile
+    attributes.
+
+    Arguments:
+      set_static - resets all parameters, including relatively static values
+    """
+
+    self.vals_lock.acquire()
+    conn = tor_tools.get_conn()
+
+    if set_static:
+      # version is truncated to first part, for instance:
+      # 0.2.2.13-alpha (git-feb8c1b5f67f2c6f) -> 0.2.2.13-alpha
+
+      self.vals["tor/version"] = conn.get_info("version", "Unknown").split()[0]
+      self.vals["tor/versionStatus"] = conn.get_info("status/version/current", "Unknown")
+      self.vals["tor/nickname"] = conn.get_option("Nickname", "")
+      self.vals["tor/or_port"] = conn.get_option("ORPort", "0")
+      self.vals["tor/dir_port"] = conn.get_option("DirPort", "0")
+      self.vals["tor/control_port"] = conn.get_option("ControlPort", "0")
+      self.vals["tor/socketPath"] = conn.get_option("ControlSocket", "")
+      self.vals["tor/isAuthPassword"] = conn.get_option("HashedControlPassword", None) is not None
+      self.vals["tor/isAuthCookie"] = conn.get_option("CookieAuthentication", None) == "1"
+
+      # ORPort is reported as zero if unset
+
+      if self.vals["tor/or_port"] == "0":
+        self.vals["tor/or_port"] = ""
+
+      # overwrite address if ORListenAddress is set (and possibly or_port too)
+
+      self.vals["tor/orListenAddr"] = ""
+      listen_addr = conn.get_option("ORListenAddress", None)
+
+      if listen_addr:
+        if ":" in listen_addr:
+          # both ip and port overwritten
+          self.vals["tor/orListenAddr"] = listen_addr[:listen_addr.find(":")]
+          self.vals["tor/or_port"] = listen_addr[listen_addr.find(":") + 1:]
+        else:
+          self.vals["tor/orListenAddr"] = listen_addr
+
+      # fetch exit policy (might span over multiple lines)
+
+      policy_entries = []
+
+      for exit_policy in conn.get_option("ExitPolicy", [], True):
+        policy_entries += [policy.strip() for policy in exit_policy.split(",")]
+
+      self.vals["tor/exit_policy"] = ", ".join(policy_entries)
+
+      # file descriptor limit for the process, if this can't be determined
+      # then the limit is None
+
+      fd_limit, fd_is_estimate = conn.get_my_file_descriptor_limit()
+      self.vals["tor/fd_limit"] = fd_limit
+      self.vals["tor/isFdLimitEstimate"] = fd_is_estimate
+
+      # system information
+
+      uname_vals = os.uname()
+      self.vals["sys/hostname"] = uname_vals[1]
+      self.vals["sys/os"] = uname_vals[0]
+      self.vals["sys/version"] = uname_vals[2]
+
+      self.vals["tor/pid"] = conn.controller.get_pid("")
+
+      start_time = conn.get_start_time()
+      self.vals["tor/start_time"] = start_time if start_time else ""
+
+      # reverts volatile parameters to defaults
+
+      self.vals["tor/fingerprint"] = "Unknown"
+      self.vals["tor/flags"] = []
+      self.vals["tor/fd_used"] = 0
+      self.vals["stat/%torCpu"] = "0"
+      self.vals["stat/%armCpu"] = "0"
+      self.vals["stat/rss"] = "0"
+      self.vals["stat/%mem"] = "0"
+
+    # sets volatile parameters
+    # TODO: This can change, being reported by STATUS_SERVER -> EXTERNAL_ADDRESS
+    # events. Introduce caching via tor_tools?
+
+    self.vals["tor/address"] = conn.get_info("address", "")
+
+    self.vals["tor/fingerprint"] = conn.get_info("fingerprint", self.vals["tor/fingerprint"])
+    self.vals["tor/flags"] = conn.get_my_flags(self.vals["tor/flags"])
+
+    # Updates file descriptor usage and logs if the usage is high. If we don't
+    # have a known limit or it's obviously faulty (being lower than our
+    # current usage) then omit file descriptor functionality.
+
+    if self.vals["tor/fd_limit"]:
+      fd_used = conn.get_my_file_descriptor_usage()
+
+      if fd_used and fd_used <= self.vals["tor/fd_limit"]:
+        self.vals["tor/fd_used"] = fd_used
+      else:
+        self.vals["tor/fd_used"] = 0
+
+    if self.vals["tor/fd_used"] and self.vals["tor/fd_limit"]:
+      fd_percent = 100 * self.vals["tor/fd_used"] / self.vals["tor/fd_limit"]
+      estimated_label = " estimated" if self.vals["tor/isFdLimitEstimate"] else ""
+      msg = "Tor's%s file descriptor usage is at %i%%." % (estimated_label, fd_percent)
+
+      if fd_percent >= 90 and not self._is_fd_ninety_percent_warned:
+        self._is_fd_sixty_percent_warned, self._is_fd_ninety_percent_warned = True, True
+        msg += " If you run out Tor will be unable to continue functioning."
+        log.warn(msg)
+      elif fd_percent >= 60 and not self._is_fd_sixty_percent_warned:
+        self._is_fd_sixty_percent_warned = True
+        log.notice(msg)
+
+    # ps or proc derived resource usage stats
+
+    if self.vals["tor/pid"]:
+      resource_tracker = arm.util.tracker.get_resource_tracker()
+
+      resources = resource_tracker.get_resource_usage()
+      self._last_resource_fetch = resource_tracker.run_counter()
+      self.vals["stat/%torCpu"] = "%0.1f" % (100 * resources.cpu_sample)
+      self.vals["stat/rss"] = str(resources.memory_bytes)
+      self.vals["stat/%mem"] = "%0.1f" % (100 * resources.memory_percent)
+
+    # determines the cpu time for the arm process (including user and system
+    # time of both the primary and child processes)
+
+    total_arm_cpu_time, current_time = sum(os.times()[:3]), time.time()
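+    # os.times() provides (user, system, children's user, children's system, elapsed) - we sum the first three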
+    arm_cpu_delta = total_arm_cpu_time - self._arm_cpu_sampling[0]
+    arm_time_delta = current_time - self._arm_cpu_sampling[1]
+    python_cpu_time = arm_cpu_delta / arm_time_delta
+    sys_call_cpu_time = 0.0  # TODO: add a wrapper around call() to get this
+    self.vals["stat/%armCpu"] = "%0.1f" % (100 * (python_cpu_time + sys_call_cpu_time))
+    self._arm_cpu_sampling = (total_arm_cpu_time, current_time)
+
+    self._last_update = current_time
+    self.vals_lock.release()
diff --git a/arm/logPanel.py b/arm/logPanel.py
deleted file mode 100644
index 894322b..0000000
--- a/arm/logPanel.py
+++ /dev/null
@@ -1,1371 +0,0 @@
-"""
-Panel providing a chronological log of events it's been configured to listen
-for. This provides prepopulation from the log file and supports filtering by
-regular expressions.
-"""
-
-import re
-import os
-import time
-import curses
-import logging
-import threading
-
-import stem
-from stem.control import State
-from stem.response import events
-from stem.util import conf, log, system
-
-import arm.arguments
-import arm.popups
-from arm import __version__
-from arm.util import panel, torTools, uiTools
-
-RUNLEVEL_EVENT_COLOR = {
-  log.DEBUG: "magenta",
-  log.INFO: "blue",
-  log.NOTICE: "green",
-  log.WARN: "yellow",
-  log.ERR: "red",
-}
-
-DAYBREAK_EVENT = "DAYBREAK"  # special event for marking when the date changes
-TIMEZONE_OFFSET = time.altzone if time.localtime()[8] else time.timezone
-
-ENTRY_INDENT = 2  # spaces an entry's message is indented after the first line
-
-
-def conf_handler(key, value):
-  if key == "features.log.max_lines_per_entry":
-    return max(1, value)
-  elif key == "features.log.prepopulateReadLimit":
-    return max(0, value)
-  elif key == "features.log.maxRefreshRate":
-    return max(10, value)
-  elif key == "cache.log_panel.size":
-    return max(1000, value)
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.log_file": "",
-  "features.log.showDateDividers": True,
-  "features.log.showDuplicateEntries": False,
-  "features.log.entryDuration": 7,
-  "features.log.max_lines_per_entry": 6,
-  "features.log.prepopulate": True,
-  "features.log.prepopulateReadLimit": 5000,
-  "features.log.maxRefreshRate": 300,
-  "features.log.regex": [],
-  "cache.log_panel.size": 1000,
-  "msg.misc.event_types": '',
-  "tor.chroot": '',
-}, conf_handler)
-
-DUPLICATE_MSG = " [%i duplicate%s hidden]"
-
-# The height of the drawn content is estimated based on the last time we redrew
-# the panel. It's chiefly used for scrolling and the bar indicating its
-# position. Letting the estimate be too inaccurate results in a display bug, so
-# redraws the display if it's off by this threshold.
-
-CONTENT_HEIGHT_REDRAW_THRESHOLD = 3
-
-# static starting portion of common log entries, fetched from the config when
-# needed if None
-
-COMMON_LOG_MESSAGES = None
-
-# cached values and the arguments that generated it for the get_daybreaks and
-# get_duplicates functions
-
-CACHED_DAYBREAKS_ARGUMENTS = (None, None)  # events, current day
-CACHED_DAYBREAKS_RESULT = None
-CACHED_DUPLICATES_ARGUMENTS = None  # events
-CACHED_DUPLICATES_RESULT = None
-
-# duration we'll wait for the deduplication function before giving up (in ms)
-
-DEDUPLICATION_TIMEOUT = 100
-
-# maximum number of regex filters we'll remember
-
-MAX_REGEX_FILTERS = 5
-
-
-def days_since(timestamp = None):
-  """
-  Provides the number of days since the epoch converted to local time (rounded
-  down).
-
-  Arguments:
-    timestamp - unix timestamp to convert, current time if undefined
-  """
-
-  if timestamp is None:
-    timestamp = time.time()
-
-  return int((timestamp - TIMEZONE_OFFSET) / 86400)
-
-
-def load_log_messages():
-  """
-  Fetches a mapping of common log messages to their runlevels from the config.
-  """
-
-  global COMMON_LOG_MESSAGES
-  arm_config = conf.get_config("arm")
-
-  COMMON_LOG_MESSAGES = {}
-
-  for conf_key in arm_config.keys():
-    if conf_key.startswith("dedup."):
-      event_type = conf_key[4:].upper()
-      messages = arm_config.get(conf_key, [])
-      COMMON_LOG_MESSAGES[event_type] = messages
-
-
-def get_log_file_entries(runlevels, read_limit = None, add_limit = None):
-  """
-  Parses tor's log file for past events matching the given runlevels, providing
-  a list of log entries (ordered newest to oldest). Limiting the number of read
-  entries is suggested to avoid parsing everything from logs in the GB and TB
-  range.
-
-  Arguments:
-    runlevels - event types (DEBUG - ERR) to be returned
-    read_limit - max lines of the log file that'll be read (unlimited if None)
-    add_limit  - maximum entries to provide back (unlimited if None)
-  """
-
-  start_time = time.time()
-
-  if not runlevels:
-    return []
-
-  # checks tor's configuration for the log file's location (if any exists)
-
-  logging_types, logging_location = None, None
-
-  for logging_entry in torTools.get_conn().get_option("Log", [], True):
-    # looks for an entry like: notice file /var/log/tor/notices.log
-
-    entry_comp = logging_entry.split()
-
-    if entry_comp[1] == "file":
-      logging_types, logging_location = entry_comp[0], entry_comp[2]
-      break
-
-  if not logging_location:
-    return []
-
-  # includes the prefix for tor paths
-
-  logging_location = CONFIG['tor.chroot'] + logging_location
-
-  # if the runlevels argument is a superset of the log file then we can
-  # limit the read contents to the add_limit
-
-  runlevels = list(log.Runlevel)
-  logging_types = logging_types.upper()
-
-  if add_limit and (not read_limit or read_limit > add_limit):
-    if "-" in logging_types:
-      div_index = logging_types.find("-")
-      start_index = runlevels.index(logging_types[:div_index])
-      end_index = runlevels.index(logging_types[div_index + 1:])
-      log_file_run_levels = runlevels[start_index:end_index + 1]
-    else:
-      start_index = runlevels.index(logging_types)
-      log_file_run_levels = runlevels[start_index:]
-
-    # checks if runlevels we're reporting are a superset of the file's contents
-
-    is_file_subset = True
-
-    for runlevel_type in log_file_run_levels:
-      if runlevel_type not in runlevels:
-        is_file_subset = False
-        break
-
-    if is_file_subset:
-      read_limit = add_limit
-
-  # tries opening the log file, cropping results to avoid choking on huge logs
-
-  lines = []
-
-  try:
-    if read_limit:
-      lines = system.call("tail -n %i %s" % (read_limit, logging_location))
-
-      if not lines:
-        raise IOError()
-    else:
-      log_file = open(logging_location, "r")
-      lines = log_file.readlines()
-      log_file.close()
-  except IOError:
-    log.warn("Unable to read tor's log file: %s" % logging_location)
-
-  if not lines:
-    return []
-
-  logged_events = []
-  current_unix_time, current_local_time = time.time(), time.localtime()
-
-  for i in range(len(lines) - 1, -1, -1):
-    line = lines[i]
-
-    # entries look like:
-    # Jul 15 18:29:48.806 [notice] Parsing GEOIP file.
-
-    line_comp = line.split()
-
-    # Checks that we have all the components we expect. This could happen if
-    # we're either not parsing a tor log or in weird edge cases (like being
-    # out of disk space)
-
-    if len(line_comp) < 4:
-      continue
-
-    event_type = line_comp[3][1:-1].upper()
-
-    if event_type in runlevels:
-      # converts timestamp to unix time
-
-      timestamp = " ".join(line_comp[:3])
-
-      # strips the decimal seconds
-
-      if "." in timestamp:
-        timestamp = timestamp[:timestamp.find(".")]
-
-      # Ignoring wday and yday since they aren't used.
-      #
-      # Pretend the year is 2012, because 2012 is a leap year, and parsing a
-      # date with strptime fails if Feb 29th is passed without a year that's
-      # actually a leap year. We can't just use the current year, because we
-      # might be parsing old logs which didn't get rotated.
-      #
-      # https://trac.torproject.org/projects/tor/ticket/5265
-
-      timestamp = "2012 " + timestamp
-      event_time_comp = list(time.strptime(timestamp, "%Y %b %d %H:%M:%S"))
-      event_time_comp[8] = current_local_time.tm_isdst
-      event_time = time.mktime(event_time_comp)  # converts local to unix time
-
-      # The above is gonna be wrong if the logs are for the previous year. If
-      # the event's in the future then correct for this.
-
-      if event_time > current_unix_time + 60:
-        event_time_comp[0] -= 1
-        event_time = time.mktime(event_time_comp)
-
-      event_msg = " ".join(line_comp[4:])
-      logged_events.append(LogEntry(event_time, event_type, event_msg, RUNLEVEL_EVENT_COLOR[event_type]))
-
-    if "opening log file" in line:
-      break  # this entry marks the start of this tor instance
-
-  if add_limit:
-    logged_events = logged_events[:add_limit]
-
-  log.info("Read %i entries from tor's log file: %s (read limit: %i, runtime: %0.3f)" % (len(logged_events), logging_location, read_limit, time.time() - start_time))
-
-  return logged_events
-
-
-def get_daybreaks(events, ignore_time_for_cache = False):
-  """
-  Provides the input events back with special 'DAYBREAK_EVENT' markers inserted
-  whenever the date changed between log entries (or since the most recent
-  event). The timestamp matches the beginning of the day for the following
-  entry.
-
-  Arguments:
-    events             - chronologically ordered listing of events
-    ignore_time_for_cache - skips taking the day into consideration for providing
-                         cached results if true
-  """
-
-  global CACHED_DAYBREAKS_ARGUMENTS, CACHED_DAYBREAKS_RESULT
-
-  if not events:
-    return []
-
-  new_listing = []
-  current_day = days_since()
-  last_day = current_day
-
-  if CACHED_DAYBREAKS_ARGUMENTS[0] == events and \
-    (ignore_time_for_cache or CACHED_DAYBREAKS_ARGUMENTS[1] == current_day):
-    return list(CACHED_DAYBREAKS_RESULT)
-
-  for entry in events:
-    event_day = days_since(entry.timestamp)
-
-    if event_day != last_day:
-      marker_timestamp = (event_day * 86400) + TIMEZONE_OFFSET
-      new_listing.append(LogEntry(marker_timestamp, DAYBREAK_EVENT, "", "white"))
-
-    new_listing.append(entry)
-    last_day = event_day
-
-  CACHED_DAYBREAKS_ARGUMENTS = (list(events), current_day)
-  CACHED_DAYBREAKS_RESULT = list(new_listing)
-
-  return new_listing
-
-
-def get_duplicates(events):
-  """
-  Deduplicates a list of log entries, providing back a tuple listing with the
-  log entry and count of duplicates following it. Entries in different days are
-  not considered to be duplicates. This times out, returning None if it takes
-  longer than DEDUPLICATION_TIMEOUT.
-
-  Arguments:
-    events - chronologically ordered listing of events
-  """
-
-  global CACHED_DUPLICATES_ARGUMENTS, CACHED_DUPLICATES_RESULT
-
-  if CACHED_DUPLICATES_ARGUMENTS == events:
-    return list(CACHED_DUPLICATES_RESULT)
-
-  # loads common log entries from the config if they haven't been
-
-  if COMMON_LOG_MESSAGES is None:
-    load_log_messages()
-
-  start_time = time.time()
-  events_remaining = list(events)
-  return_events = []
-
-  while events_remaining:
-    entry = events_remaining.pop(0)
-    duplicate_indices = is_duplicate(entry, events_remaining, True)
-
-    # checks if the call timeout has been reached
-
-    if (time.time() - start_time) > DEDUPLICATION_TIMEOUT / 1000.0:
-      return None
-
-    # drops duplicate entries
-
-    duplicate_indices.reverse()
-
-    for i in duplicate_indices:
-      del events_remaining[i]
-
-    return_events.append((entry, len(duplicate_indices)))
-
-  CACHED_DUPLICATES_ARGUMENTS = list(events)
-  CACHED_DUPLICATES_RESULT = list(return_events)
-
-  return return_events
-
-
-def is_duplicate(event, event_set, get_duplicates = False):
-  """
-  True if the event is a duplicate for something in the event_set, false
-  otherwise. If the get_duplicates flag is set this provides the indices of
-  the duplicates instead.
-
-  Arguments:
-    event         - event to search for duplicates of
-    event_set      - set to look for the event in
-    get_duplicates - instead of providing back a boolean this gives a list of
-                    the duplicate indices in the event_set
-  """
-
-  duplicate_indices = []
-
-  for i in range(len(event_set)):
-    forward_entry = event_set[i]
-
-    # if showing dates then do duplicate detection for each day, rather
-    # than globally
-
-    if forward_entry.type == DAYBREAK_EVENT:
-      break
-
-    if event.type == forward_entry.type:
-      is_duplicate = False
-
-      if event.msg == forward_entry.msg:
-        is_duplicate = True
-      elif event.type in COMMON_LOG_MESSAGES:
-        for common_msg in COMMON_LOG_MESSAGES[event.type]:
-          # if it starts with an asterisk then check the whole message rather
-          # than just the start
-
-          if common_msg[0] == "*":
-            is_duplicate = common_msg[1:] in event.msg and common_msg[1:] in forward_entry.msg
-          else:
-            is_duplicate = event.msg.startswith(common_msg) and forward_entry.msg.startswith(common_msg)
-
-          if is_duplicate:
-            break
-
-      if is_duplicate:
-        if get_duplicates:
-          duplicate_indices.append(i)
-        else:
-          return True
-
-  if get_duplicates:
-    return duplicate_indices
-  else:
-    return False
-
-
-class LogEntry():
-  """
-  Individual log file entry, having the following attributes:
-    timestamp - unix timestamp for when the event occurred
-    event_type - event type that occurred ("INFO", "BW", "ARM_WARN", etc)
-    msg       - message that was logged
-    color     - color of the log entry
-  """
-
-  def __init__(self, timestamp, event_type, msg, color):
-    self.timestamp = timestamp
-    self.type = event_type
-    self.msg = msg
-    self.color = color
-    self._display_message = None
-
-  def get_display_message(self, include_date = False):
-    """
-    Provides the entry's message for the log.
-
-    Arguments:
-      include_date - appends the event's date to the start of the message
-    """
-
-    if include_date:
-      # not the common case so skip caching
-      entry_time = time.localtime(self.timestamp)
-      time_label = "%i/%i/%i %02i:%02i:%02i" % (entry_time[1], entry_time[2], entry_time[0], entry_time[3], entry_time[4], entry_time[5])
-      return "%s [%s] %s" % (time_label, self.type, self.msg)
-
-    if not self._display_message:
-      entry_time = time.localtime(self.timestamp)
-      self._display_message = "%02i:%02i:%02i [%s] %s" % (entry_time[3], entry_time[4], entry_time[5], self.type, self.msg)
-
-    return self._display_message
-
-
-class LogPanel(panel.Panel, threading.Thread, logging.Handler):
-  """
-  Listens for and displays tor, arm, and stem events. This can prepopulate
-  from tor's log file if it exists.
-  """
-
-  def __init__(self, stdscr, logged_events):
-    panel.Panel.__init__(self, stdscr, "log", 0)
-    logging.Handler.__init__(self, level = log.logging_level(log.DEBUG))
-
-    self.setFormatter(logging.Formatter(
-      fmt = '%(asctime)s [%(levelname)s] %(message)s',
-      datefmt = '%m/%d/%Y %H:%M:%S'),
-    )
-
-    threading.Thread.__init__(self)
-    self.setDaemon(True)
-
-    # Make sure that the msg.* messages are loaded. Lazy loading it later is
-    # fine, but this way we're sure it happens before warning about unused
-    # config options.
-
-    load_log_messages()
-
-    # regex filters the user has defined
-
-    self.filter_options = []
-
-    for filter in CONFIG["features.log.regex"]:
-      # checks if we can't have more filters
-
-      if len(self.filter_options) >= MAX_REGEX_FILTERS:
-        break
-
-      try:
-        re.compile(filter)
-        self.filter_options.append(filter)
-      except re.error as exc:
-        log.notice("Invalid regular expression pattern (%s): %s" % (exc, filter))
-
-    self.logged_events = []  # needs to be set before we receive any events
-
-    # restricts the input to the set of events we can listen to, and
-    # configures the controller to listen to them
-
-    self.logged_events = self.set_event_listening(logged_events)
-
-    self.set_pause_attr("msg_log")       # tracks the message log when we're paused
-    self.msg_log = []                    # log entries, sorted by the timestamp
-    self.regex_filter = None             # filter for presented log events (no filtering if None)
-    self.last_content_height = 0         # height of the rendered content when last drawn
-    self.log_file = None                 # file log messages are saved to (skipped if None)
-    self.scroll = 0
-
-    self._last_update = -1               # time the content was last revised
-    self._halt = False                   # terminates thread if true
-    self._cond = threading.Condition()   # used for pausing/resuming the thread
-
-    # restricts concurrent write access to attributes used to draw the display
-    # and pausing:
-    # msg_log, logged_events, regex_filter, scroll
-
-    self.vals_lock = threading.RLock()
-
-    # cached parameters (invalidated if arguments for them change)
-    # last set of events we've drawn with
-
-    self._last_logged_events = []
-
-    # _get_title (args: logged_events, regex_filter pattern, width)
-
-    self._title_cache = None
-    self._title_args = (None, None, None)
-
-    self.reprepopulate_events()
-
-    # leaving last_content_height as being too low causes initialization problems
-
-    self.last_content_height = len(self.msg_log)
-
-    # adds listeners for tor and stem events
-
-    conn = torTools.get_conn()
-    conn.add_status_listener(self._reset_listener)
-
-    # opens log file if we'll be saving entries
-
-    if CONFIG["features.log_file"]:
-      log_path = CONFIG["features.log_file"]
-
-      try:
-        # make dir if the path doesn't already exist
-
-        base_dir = os.path.dirname(log_path)
-
-        if not os.path.exists(base_dir):
-          os.makedirs(base_dir)
-
-        self.log_file = open(log_path, "a")
-        log.notice("arm %s opening log file (%s)" % (__version__, log_path))
-      except IOError as exc:
-        log.error("Unable to write to log file: %s" % exc.strerror)
-        self.log_file = None
-      except OSError as exc:
-        log.error("Unable to write to log file: %s" % exc)
-        self.log_file = None
-
-    stem_logger = log.get_logger()
-    stem_logger.addHandler(self)
-
-  def emit(self, record):
-    if record.levelname == "WARNING":
-      record.levelname = "WARN"
-
-    event_color = RUNLEVEL_EVENT_COLOR[record.levelname]
-    self.register_event(LogEntry(int(record.created), "ARM_%s" % record.levelname, record.msg, event_color))
-
-  def reprepopulate_events(self):
-    """
-    Clears the event log and repopulates it from the arm and tor backlogs.
-    """
-
-    self.vals_lock.acquire()
-
-    # clears the event log
-
-    self.msg_log = []
-
-    # fetches past tor events from log file, if available
-
-    if CONFIG["features.log.prepopulate"]:
-      set_runlevels = list(set.intersection(set(self.logged_events), set(list(log.Runlevel))))
-      read_limit = CONFIG["features.log.prepopulateReadLimit"]
-      add_limit = CONFIG["cache.log_panel.size"]
-
-      for entry in get_log_file_entries(set_runlevels, read_limit, add_limit):
-        self.msg_log.append(entry)
-
-    # crops events that are either too old, or more numerous than the caching size
-
-    self._trim_events(self.msg_log)
-
-    self.vals_lock.release()
-
-  def set_duplicate_visability(self, is_visible):
-    """
-    Sets if duplicate log entries are collapsed or expanded.
-
-    Arguments:
-      is_visible - if true all log entries are shown, otherwise they're
-                   deduplicated
-    """
-
-    arm_config = conf.get_config("arm")
-    arm_config.set("features.log.showDuplicateEntries", str(is_visible))
-
-  def register_tor_event(self, event):
-    """
-    Translates a stem.response.event.Event instance into a LogEvent, and calls
-    register_event().
-    """
-
-    msg, color = ' '.join(str(event).split(' ')[1:]), "white"
-
-    if isinstance(event, events.CircuitEvent):
-      color = "yellow"
-    elif isinstance(event, events.BandwidthEvent):
-      color = "cyan"
-      msg = "READ: %i, WRITTEN: %i" % (event.read, event.written)
-    elif isinstance(event, events.LogEvent):
-      color = RUNLEVEL_EVENT_COLOR[event.runlevel]
-      msg = event.message
-    elif isinstance(event, events.NetworkStatusEvent):
-      color = "blue"
-    elif isinstance(event, events.NewConsensusEvent):
-      color = "magenta"
-    elif isinstance(event, events.GuardEvent):
-      color = "yellow"
-    elif not event.type in arm.arguments.TOR_EVENT_TYPES.values():
-      color = "red"  # unknown event type
-
-    self.register_event(LogEntry(event.arrived_at, event.type, msg, color))
-
-  def register_event(self, event):
-    """
-    Notes event and redraws log. If paused it's held in a temporary buffer.
-
-    Arguments:
-      event - LogEntry for the event that occurred
-    """
-
-    if not event.type in self.logged_events:
-      return
-
-    # strips control characters to avoid screwing up the terminal
-
-    event.msg = uiTools.get_printable(event.msg)
-
-    # note event in the log file if we're saving them
-
-    if self.log_file:
-      try:
-        self.log_file.write(event.get_display_message(True) + "\n")
-        self.log_file.flush()
-      except IOError as exc:
-        log.error("Unable to write to log file: %s" % exc.strerror)
-        self.log_file = None
-
-    self.vals_lock.acquire()
-    self.msg_log.insert(0, event)
-    self._trim_events(self.msg_log)
-
-    # notifies the display that it has new content
-
-    if not self.regex_filter or self.regex_filter.search(event.get_display_message()):
-      self._cond.acquire()
-      self._cond.notifyAll()
-      self._cond.release()
-
-    self.vals_lock.release()
-
-  def set_logged_events(self, event_types):
-    """
-    Sets the event types recognized by the panel.
-
-    Arguments:
-      event_types - event types to be logged
-    """
-
-    if event_types == self.logged_events:
-      return
-
-    self.vals_lock.acquire()
-
-    # configures the controller to listen for these tor events, and provides
-    # back a subset without anything we're failing to listen to
-
-    set_types = self.set_event_listening(event_types)
-    self.logged_events = set_types
-    self.redraw(True)
-    self.vals_lock.release()
-
-  def get_filter(self):
-    """
-    Provides our currently selected regex filter.
-    """
-
-    return self.filter_options[0] if self.regex_filter else None
-
-  def set_filter(self, log_filter):
-    """
-    Filters log entries according to the given regular expression.
-
-    Arguments:
-      log_filter - regular expression used to determine which messages are
-                  shown, None if no filter should be applied
-    """
-
-    if log_filter == self.regex_filter:
-      return
-
-    self.vals_lock.acquire()
-    self.regex_filter = log_filter
-    self.redraw(True)
-    self.vals_lock.release()
-
-  def make_filter_selection(self, selected_option):
-    """
-    Makes the given filter selection, applying it to the log and reorganizing
-    our filter selection.
-
-    Arguments:
-      selected_option - regex filter we've already added, None if no filter
-                       should be applied
-    """
-
-    if selected_option:
-      try:
-        self.set_filter(re.compile(selected_option))
-
-        # move selection to top
-
-        self.filter_options.remove(selected_option)
-        self.filter_options.insert(0, selected_option)
-      except re.error as exc:
-        # shouldn't happen since we've already checked validity
-
-        log.warn("Invalid regular expression ('%s': %s) - removing from listing" % (selected_option, exc))
-        self.filter_options.remove(selected_option)
-    else:
-      self.set_filter(None)
-
-  def show_filter_prompt(self):
-    """
-    Prompts the user to add a new regex filter.
-    """
-
-    regex_input = arm.popups.input_prompt("Regular expression: ")
-
-    if regex_input:
-      try:
-        self.set_filter(re.compile(regex_input))
-
-        if regex_input in self.filter_options:
-          self.filter_options.remove(regex_input)
-
-        self.filter_options.insert(0, regex_input)
-      except re.error as exc:
-        arm.popups.show_msg("Unable to compile expression: %s" % exc, 2)
-
-  def show_event_selection_prompt(self):
-    """
-    Prompts the user to select the events being listened for.
-    """
-
-    # allow user to enter new types of events to log - unchanged if left blank
-
-    popup, width, height = arm.popups.init(11, 80)
-
-    if popup:
-      try:
-        # displays the available flags
-
-        popup.win.box()
-        popup.addstr(0, 0, "Event Types:", curses.A_STANDOUT)
-        event_lines = CONFIG['msg.misc.event_types'].split("\n")
-
-        for i in range(len(event_lines)):
-          popup.addstr(i + 1, 1, event_lines[i][6:])
-
-        popup.win.refresh()
-
-        user_input = arm.popups.input_prompt("Events to log: ")
-
-        if user_input:
-          user_input = user_input.replace(' ', '')  # strips spaces
-
-          try:
-            self.set_logged_events(arm.arguments.expand_events(user_input))
-          except ValueError as exc:
-            arm.popups.show_msg("Invalid flags: %s" % str(exc), 2)
-      finally:
-        arm.popups.finalize()
-
-  def show_snapshot_prompt(self):
-    """
-    Lets user enter a path to take a snapshot, canceling if left blank.
-    """
-
-    path_input = arm.popups.input_prompt("Path to save log snapshot: ")
-
-    if path_input:
-      try:
-        self.save_snapshot(path_input)
-        arm.popups.show_msg("Saved: %s" % path_input, 2)
-      except IOError as exc:
-        arm.popups.show_msg("Unable to save snapshot: %s" % exc.strerror, 2)
-
-  def clear(self):
-    """
-    Clears the contents of the event log.
-    """
-
-    self.vals_lock.acquire()
-    self.msg_log = []
-    self.redraw(True)
-    self.vals_lock.release()
-
-  def save_snapshot(self, path):
-    """
-    Saves the log events currently being displayed to the given path. This
-    takes filters into account. This overwrites the file if it already exists,
-    and raises an IOError if there's a problem.
-
-    Arguments:
-      path - path where to save the log snapshot
-    """
-
-    path = os.path.abspath(os.path.expanduser(path))
-
-    # make dir if the path doesn't already exist
-
-    base_dir = os.path.dirname(path)
-
-    try:
-      if not os.path.exists(base_dir):
-        os.makedirs(base_dir)
-    except OSError as exc:
-      raise IOError("unable to make directory '%s'" % base_dir)
-
-    snapshot_file = open(path, "w")
-    self.vals_lock.acquire()
-
-    try:
-      for entry in self.msg_log:
-        is_visible = not self.regex_filter or self.regex_filter.search(entry.get_display_message())
-
-        if is_visible:
-          snapshot_file.write(entry.get_display_message(True) + "\n")
-
-      self.vals_lock.release()
-    except Exception as exc:
-      self.vals_lock.release()
-      raise exc
-
-  def handle_key(self, key):
-    is_keystroke_consumed = True
-
-    if uiTools.is_scroll_key(key):
-      page_height = self.get_preferred_size()[0] - 1
-      new_scroll = uiTools.get_scroll_position(key, self.scroll, page_height, self.last_content_height)
-
-      if self.scroll != new_scroll:
-        self.vals_lock.acquire()
-        self.scroll = new_scroll
-        self.redraw(True)
-        self.vals_lock.release()
-    elif key in (ord('u'), ord('U')):
-      self.vals_lock.acquire()
-      self.set_duplicate_visability(not CONFIG["features.log.showDuplicateEntries"])
-      self.redraw(True)
-      self.vals_lock.release()
-    elif key == ord('c') or key == ord('C'):
-      msg = "This will clear the log. Are you sure (c again to confirm)?"
-      key_press = arm.popups.show_msg(msg, attr = curses.A_BOLD)
-
-      if key_press in (ord('c'), ord('C')):
-        self.clear()
-    elif key == ord('f') or key == ord('F'):
-      # Provides menu to pick regular expression filters or adding new ones:
-      # for syntax see: http://docs.python.org/library/re.html#regular-expression-syntax
-
-      options = ["None"] + self.filter_options + ["New..."]
-      old_selection = 0 if not self.regex_filter else 1
-
-      # does all activity under a curses lock to prevent redraws when adding
-      # new filters
-
-      panel.CURSES_LOCK.acquire()
-
-      try:
-        selection = arm.popups.show_menu("Log Filter:", options, old_selection)
-
-        # applies new setting
-
-        if selection == 0:
-          self.set_filter(None)
-        elif selection == len(options) - 1:
-          # selected 'New...' option - prompt user to input regular expression
-          self.show_filter_prompt()
-        elif selection != -1:
-          self.make_filter_selection(self.filter_options[selection - 1])
-      finally:
-        panel.CURSES_LOCK.release()
-
-      if len(self.filter_options) > MAX_REGEX_FILTERS:
-        del self.filter_options[MAX_REGEX_FILTERS:]
-    elif key == ord('e') or key == ord('E'):
-      self.show_event_selection_prompt()
-    elif key == ord('a') or key == ord('A'):
-      self.show_snapshot_prompt()
-    else:
-      is_keystroke_consumed = False
-
-    return is_keystroke_consumed
-
-  def get_help(self):
-    options = []
-    options.append(("up arrow", "scroll log up a line", None))
-    options.append(("down arrow", "scroll log down a line", None))
-    options.append(("a", "save snapshot of the log", None))
-    options.append(("e", "change logged events", None))
-    options.append(("f", "log regex filter", "enabled" if self.regex_filter else "disabled"))
-    options.append(("u", "duplicate log entries", "visible" if CONFIG["features.log.showDuplicateEntries"] else "hidden"))
-    options.append(("c", "clear event log", None))
-    return options
-
-  def draw(self, width, height):
-    """
-    Redraws message log. Entries stretch to use available space and may
-    contain up to two lines. Starts with newest entries.
-    """
-
-    current_log = self.get_attr("msg_log")
-
-    self.vals_lock.acquire()
-    self._last_logged_events, self._last_update = list(current_log), time.time()
-
-    # draws the top label
-
-    if self.is_title_visible():
-      self.addstr(0, 0, self._get_title(width), curses.A_STANDOUT)
-
-    # restricts scroll location to valid bounds
-
-    self.scroll = max(0, min(self.scroll, self.last_content_height - height + 1))
-
-    # draws left-hand scroll bar if content's longer than the height
-
-    msg_indent, divider_indent = 1, 0  # offsets for scroll bar
-    is_scroll_bar_visible = self.last_content_height > height - 1
-
-    if is_scroll_bar_visible:
-      msg_indent, divider_indent = 3, 2
-      self.add_scroll_bar(self.scroll, self.scroll + height - 1, self.last_content_height, 1)
-
-    # draws log entries
-
-    line_count = 1 - self.scroll
-    seen_first_date_divider = False
-    divider_attr, duplicate_attr = curses.A_BOLD | uiTools.get_color("yellow"), curses.A_BOLD | uiTools.get_color("green")
-
-    is_dates_shown = self.regex_filter is None and CONFIG["features.log.showDateDividers"]
-    event_log = get_daybreaks(current_log, self.is_paused()) if is_dates_shown else list(current_log)
-
-    if not CONFIG["features.log.showDuplicateEntries"]:
-      deduplicated_log = get_duplicates(event_log)
-
-      if deduplicated_log is None:
-        log.warn("Deduplication took too long. Its current implementation has difficulty handling large logs so disabling it to keep the interface responsive.")
-        self.set_duplicate_visability(True)
-        deduplicated_log = [(entry, 0) for entry in event_log]
-    else:
-      deduplicated_log = [(entry, 0) for entry in event_log]
-
-    # determines if we have the minimum width to show date dividers
-
-    show_daybreaks = width - divider_indent >= 3
-
-    while deduplicated_log:
-      entry, duplicate_count = deduplicated_log.pop(0)
-
-      if self.regex_filter and not self.regex_filter.search(entry.get_display_message()):
-        continue  # filter doesn't match log message - skip
-
-      # checks if we should be showing a divider with the date
-
-      if entry.type == DAYBREAK_EVENT:
-        # bottom of the divider
-
-        if seen_first_date_divider:
-          if line_count >= 1 and line_count < height and show_daybreaks:
-            self.addch(line_count, divider_indent, curses.ACS_LLCORNER, divider_attr)
-            self.hline(line_count, divider_indent + 1, width - divider_indent - 2, divider_attr)
-            self.addch(line_count, width - 1, curses.ACS_LRCORNER, divider_attr)
-
-          line_count += 1
-
-        # top of the divider
-
-        if line_count >= 1 and line_count < height and show_daybreaks:
-          time_label = time.strftime(" %B %d, %Y ", time.localtime(entry.timestamp))
-          self.addch(line_count, divider_indent, curses.ACS_ULCORNER, divider_attr)
-          self.addch(line_count, divider_indent + 1, curses.ACS_HLINE, divider_attr)
-          self.addstr(line_count, divider_indent + 2, time_label, curses.A_BOLD | divider_attr)
-
-          line_length = width - divider_indent - len(time_label) - 3
-          self.hline(line_count, divider_indent + len(time_label) + 2, line_length, divider_attr)
-          self.addch(line_count, divider_indent + len(time_label) + 2 + line_length, curses.ACS_URCORNER, divider_attr)
-
-        seen_first_date_divider = True
-        line_count += 1
-      else:
-        # entry contents to be displayed, tuples of the form:
-        # (msg, formatting, includeLinebreak)
-
-        display_queue = []
-
-        msg_comp = entry.get_display_message().split("\n")
-
-        for i in range(len(msg_comp)):
-          font = curses.A_BOLD if "ERR" in entry.type else curses.A_NORMAL  # emphasizes ERR messages
-          display_queue.append((msg_comp[i].strip(), font | uiTools.get_color(entry.color), i != len(msg_comp) - 1))
-
-        if duplicate_count:
-          plural_label = "s" if duplicate_count > 1 else ""
-          duplicate_msg = DUPLICATE_MSG % (duplicate_count, plural_label)
-          display_queue.append((duplicate_msg, duplicate_attr, False))
-
-        cursor_location, line_offset = msg_indent, 0
-        max_entries_per_line = CONFIG["features.log.max_lines_per_entry"]
-
-        while display_queue:
-          msg, format, include_break = display_queue.pop(0)
-          draw_line = line_count + line_offset
-
-          if line_offset == max_entries_per_line:
-            break
-
-          max_msg_size = width - cursor_location - 1
-
-          if len(msg) > max_msg_size:
-            # message is too long - break it up
-            if line_offset == max_entries_per_line - 1:
-              msg = uiTools.crop_str(msg, max_msg_size)
-            else:
-              msg, remainder = uiTools.crop_str(msg, max_msg_size, 4, 4, uiTools.Ending.HYPHEN, True)
-              display_queue.insert(0, (remainder.strip(), format, include_break))
-
-            include_break = True
-
-          if draw_line < height and draw_line >= 1:
-            if seen_first_date_divider and width - divider_indent >= 3 and show_daybreaks:
-              self.addch(draw_line, divider_indent, curses.ACS_VLINE, divider_attr)
-              self.addch(draw_line, width - 1, curses.ACS_VLINE, divider_attr)
-
-            self.addstr(draw_line, cursor_location, msg, format)
-
-          cursor_location += len(msg)
-
-          if include_break or not display_queue:
-            line_offset += 1
-            cursor_location = msg_indent + ENTRY_INDENT
-
-        line_count += line_offset
-
-      # if this is the last line and there's room, then draw the bottom of the divider
-
-      if not deduplicated_log and seen_first_date_divider:
-        if line_count < height and show_daybreaks:
-          self.addch(line_count, divider_indent, curses.ACS_LLCORNER, divider_attr)
-          self.hline(line_count, divider_indent + 1, width - divider_indent - 2, divider_attr)
-          self.addch(line_count, width - 1, curses.ACS_LRCORNER, divider_attr)
-
-        line_count += 1
-
-    # redraw the display if...
-    # - last_content_height was off by too much
-    # - we're off the bottom of the page
-
-    new_content_height = line_count + self.scroll - 1
-    content_height_delta = abs(self.last_content_height - new_content_height)
-    force_redraw, force_redraw_reason = True, ""
-
-    if content_height_delta >= CONTENT_HEIGHT_REDRAW_THRESHOLD:
-      force_redraw_reason = "estimate was off by %i" % content_height_delta
-    elif new_content_height > height and self.scroll + height - 1 > new_content_height:
-      force_redraw_reason = "scrolled off the bottom of the page"
-    elif not is_scroll_bar_visible and new_content_height > height - 1:
-      force_redraw_reason = "scroll bar wasn't previously visible"
-    elif is_scroll_bar_visible and new_content_height <= height - 1:
-      force_redraw_reason = "scroll bar shouldn't be visible"
-    else:
-      force_redraw = False
-
-    self.last_content_height = new_content_height
-
-    if force_redraw:
-      log.debug("redrawing the log panel with the corrected content height (%s)" % force_redraw_reason)
-      self.redraw(True)
-
-    self.vals_lock.release()
-
-  def redraw(self, force_redraw=False, block=False):
-    # determines if the content needs to be redrawn or not
-    panel.Panel.redraw(self, force_redraw, block)
-
-  def run(self):
-    """
-    Redraws the display, coalescing updates if events are rapidly logged (for
-    instance running at the DEBUG runlevel) while also being immediately
-    responsive if additions are less frequent.
-    """
-
-    last_day = days_since()  # used to determine if the date has changed
-
-    while not self._halt:
-      current_day = days_since()
-      time_since_reset = time.time() - self._last_update
-      max_log_update_rate = CONFIG["features.log.maxRefreshRate"] / 1000.0
-
-      sleep_time = 0
-
-      if (self.msg_log == self._last_logged_events and last_day == current_day) or self.is_paused():
-        sleep_time = 5
-      elif time_since_reset < max_log_update_rate:
-        sleep_time = max(0.05, max_log_update_rate - time_since_reset)
-
-      if sleep_time:
-        self._cond.acquire()
-
-        if not self._halt:
-          self._cond.wait(sleep_time)
-
-        self._cond.release()
-      else:
-        last_day = current_day
-        self.redraw(True)
-
-        # makes sure that we register this as an update, otherwise lacking the
-        # curses lock can cause a busy wait here
-
-        self._last_update = time.time()
-
-  def stop(self):
-    """
-    Halts further resolutions and terminates the thread.
-    """
-
-    self._cond.acquire()
-    self._halt = True
-    self._cond.notifyAll()
-    self._cond.release()
-
-  def set_event_listening(self, events):
-    """
-    Configures the events Tor listens for, filtering non-tor events from what we
-    request from the controller. This returns a sorted list of the events we
-    successfully set.
-
-    Arguments:
-      events - event types to attempt to set
-    """
-
-    events = set(events)  # drops duplicates
-
-    # accounts for runlevel naming difference
-
-    if "ERROR" in events:
-      events.add("ERR")
-      events.remove("ERROR")
-
-    if "WARNING" in events:
-      events.add("WARN")
-      events.remove("WARNING")
-
-    tor_events = events.intersection(set(arm.arguments.TOR_EVENT_TYPES.values()))
-    arm_events = events.intersection(set(["ARM_%s" % runlevel for runlevel in log.Runlevel.keys()]))
-
-    # adds events unrecognized by arm if we're listening to the 'UNKNOWN' type
-
-    if "UNKNOWN" in events:
-      tor_events.update(set(arm.arguments.missing_event_types()))
-
-    tor_conn = torTools.get_conn()
-    tor_conn.remove_event_listener(self.register_tor_event)
-
-    for event_type in list(tor_events):
-      try:
-        tor_conn.add_event_listener(self.register_tor_event, event_type)
-      except stem.ProtocolError:
-        tor_events.remove(event_type)
-
-    # provides back the input set minus events we failed to set
-
-    return sorted(tor_events.union(arm_events))
-
-  def _reset_listener(self, controller, event_type, _):
-    # if we're attaching to a new tor instance then clears the log and
-    # prepopulates it with the content belonging to this instance
-
-    if event_type == State.INIT:
-      self.reprepopulate_events()
-      self.redraw(True)
-    elif event_type == State.CLOSED:
-      log.notice("Tor control port closed")
-
-  def _get_title(self, width):
-    """
-    Provides the label used for the panel, looking like:
-      Events (ARM NOTICE - ERR, BW - filter: prepopulate):
-
-    This truncates the attributes (with an ellipse) if too long, and condenses
-    runlevel ranges if there's three or more in a row (for instance ARM_INFO,
-    ARM_NOTICE, and ARM_WARN becomes "ARM_INFO - WARN").
-
-    Arguments:
-      width - width constraint the label needs to fit in
-    """
-
-    # usually the attributes used to make the label are decently static, so
-    # provide cached results if they're unchanged
-
-    self.vals_lock.acquire()
-    current_pattern = self.regex_filter.pattern if self.regex_filter else None
-    is_unchanged = self._title_args[0] == self.logged_events
-    is_unchanged &= self._title_args[1] == current_pattern
-    is_unchanged &= self._title_args[2] == width
-
-    if is_unchanged:
-      self.vals_lock.release()
-      return self._title_cache
-
-    events_list = list(self.logged_events)
-
-    if not events_list:
-      if not current_pattern:
-        panel_label = "Events:"
-      else:
-        label_pattern = uiTools.crop_str(current_pattern, width - 18)
-        panel_label = "Events (filter: %s):" % label_pattern
-    else:
-      # does the following with all runlevel types (tor, arm, and stem):
-      # - pulls to the start of the list
-      # - condenses range if there's three or more in a row (ex. "ARM_INFO - WARN")
-      # - condense further if there's identical runlevel ranges for multiple
-      #   types (ex. "NOTICE - ERR, ARM_NOTICE - ERR" becomes "TOR/ARM NOTICE - ERR")
-
-      tmp_runlevels = []  # runlevels pulled from the list (just the runlevel part)
-      runlevel_ranges = []  # tuples of (type, start_level, end_level) for ranges to be condensed
-
-      # reverses runlevels and types so they're appended in the right order
-
-      reversed_runlevels = list(log.Runlevel)
-      reversed_runlevels.reverse()
-
-      for prefix in ("ARM_", ""):
-        # blank ending runlevel forces the break condition to be reached at the end
-        for runlevel in reversed_runlevels + [""]:
-          event_type = prefix + runlevel
-          if runlevel and event_type in events_list:
-            # runlevel event found, move to the tmp list
-            events_list.remove(event_type)
-            tmp_runlevels.append(runlevel)
-          elif tmp_runlevels:
-            # adds all tmp list entries to the start of events_list
-            if len(tmp_runlevels) >= 3:
-              # save condense sequential runlevels to be added later
-              runlevel_ranges.append((prefix, tmp_runlevels[-1], tmp_runlevels[0]))
-            else:
-              # adds runlevels individaully
-              for tmp_runlevel in tmp_runlevels:
-                events_list.insert(0, prefix + tmp_runlevel)
-
-            tmp_runlevels = []
-
-      # adds runlevel ranges, condensing if there's identical ranges
-
-      for i in range(len(runlevel_ranges)):
-        if runlevel_ranges[i]:
-          prefix, start_level, end_level = runlevel_ranges[i]
-
-          # check for matching ranges
-
-          matches = []
-
-          for j in range(i + 1, len(runlevel_ranges)):
-            if runlevel_ranges[j] and runlevel_ranges[j][1] == start_level and runlevel_ranges[j][2] == end_level:
-              matches.append(runlevel_ranges[j])
-              runlevel_ranges[j] = None
-
-          if matches:
-            # strips underscores and replaces empty entries with "TOR"
-
-            prefixes = [entry[0] for entry in matches] + [prefix]
-
-            for k in range(len(prefixes)):
-              if prefixes[k] == "":
-                prefixes[k] = "TOR"
-              else:
-                prefixes[k] = prefixes[k].replace("_", "")
-
-            events_list.insert(0, "%s %s - %s" % ("/".join(prefixes), start_level, end_level))
-          else:
-            events_list.insert(0, "%s%s - %s" % (prefix, start_level, end_level))
-
-      # truncates to use an ellipsis if too long, for instance:
-
-      attr_label = ", ".join(events_list)
-
-      if current_pattern:
-        attr_label += " - filter: %s" % current_pattern
-
-      attr_label = uiTools.crop_str(attr_label, width - 10, 1)
-
-      if attr_label:
-        attr_label = " (%s)" % attr_label
-
-      panel_label = "Events%s:" % attr_label
-
-    # cache results and return
-
-    self._title_cache = panel_label
-    self._title_args = (list(self.logged_events), current_pattern, width)
-    self.vals_lock.release()
-
-    return panel_label
-
-  def _trim_events(self, event_listing):
-    """
-    Crops events that have either:
-    - grown beyond the cache limit
-    - outlived the configured log duration
-
-    Argument:
-      event_listing - listing of log entries
-    """
-
-    cache_size = CONFIG["cache.log_panel.size"]
-
-    if len(event_listing) > cache_size:
-      del event_listing[cache_size:]
-
-    log_ttl = CONFIG["features.log.entryDuration"]
-
-    if log_ttl > 0:
-      current_day = days_since()
-
-      breakpoint = None  # index at which to crop from
-
-      for i in range(len(event_listing) - 1, -1, -1):
-        days_since_event = current_day - days_since(event_listing[i].timestamp)
-
-        if days_since_event > log_ttl:
-          breakpoint = i  # older than the ttl
-        else:
-          break
-
-      # removes entries older than the ttl
-
-      if breakpoint is not None:
-        del event_listing[breakpoint:]
diff --git a/arm/log_panel.py b/arm/log_panel.py
new file mode 100644
index 0000000..bf96782
--- /dev/null
+++ b/arm/log_panel.py
@@ -0,0 +1,1371 @@
+"""
+Panel providing a chronological log of events it's been configured to listen
+for. This provides prepopulation from the log file and supports filtering by
+regular expressions.
+"""
+
+import re
+import os
+import time
+import curses
+import logging
+import threading
+
+import stem
+from stem.control import State
+from stem.response import events
+from stem.util import conf, log, system
+
+import arm.arguments
+import arm.popups
+from arm import __version__
+from arm.util import panel, tor_tools, ui_tools
+
+RUNLEVEL_EVENT_COLOR = {
+  log.DEBUG: "magenta",
+  log.INFO: "blue",
+  log.NOTICE: "green",
+  log.WARN: "yellow",
+  log.ERR: "red",
+}
+
+DAYBREAK_EVENT = "DAYBREAK"  # special event for marking when the date changes
+TIMEZONE_OFFSET = time.altzone if time.localtime()[8] else time.timezone
+
+ENTRY_INDENT = 2  # spaces an entry's message is indented after the first line
+
+
+def conf_handler(key, value):
+  if key == "features.log.max_lines_per_entry":
+    return max(1, value)
+  elif key == "features.log.prepopulateReadLimit":
+    return max(0, value)
+  elif key == "features.log.maxRefreshRate":
+    return max(10, value)
+  elif key == "cache.log_panel.size":
+    return max(1000, value)
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.log_file": "",
+  "features.log.showDateDividers": True,
+  "features.log.showDuplicateEntries": False,
+  "features.log.entryDuration": 7,
+  "features.log.max_lines_per_entry": 6,
+  "features.log.prepopulate": True,
+  "features.log.prepopulateReadLimit": 5000,
+  "features.log.maxRefreshRate": 300,
+  "features.log.regex": [],
+  "cache.log_panel.size": 1000,
+  "msg.misc.event_types": '',
+  "tor.chroot": '',
+}, conf_handler)
+
+DUPLICATE_MSG = " [%i duplicate%s hidden]"
+
+# The height of the drawn content is estimated based on the last time we redrew
+# the panel. It's chiefly used for scrolling and the bar indicating its
+# position. Letting the estimate be too inaccurate results in a display bug,
+# so we redraw the display if it's off by this threshold.
+
+CONTENT_HEIGHT_REDRAW_THRESHOLD = 3
+
+# static starting portion of common log entries, fetched from the config when
+# needed if None
+
+COMMON_LOG_MESSAGES = None
+
+# cached values and the arguments that generated them for the get_daybreaks and
+# get_duplicates functions
+
+CACHED_DAYBREAKS_ARGUMENTS = (None, None)  # events, current day
+CACHED_DAYBREAKS_RESULT = None
+CACHED_DUPLICATES_ARGUMENTS = None  # events
+CACHED_DUPLICATES_RESULT = None
+
+# duration we'll wait for the deduplication function before giving up (in ms)
+
+DEDUPLICATION_TIMEOUT = 100
+
+# maximum number of regex filters we'll remember
+
+MAX_REGEX_FILTERS = 5
+
+
+def days_since(timestamp = None):
+  """
+  Provides the number of days since the epoch converted to local time (rounded
+  down).
+
+  Arguments:
+    timestamp - unix timestamp to convert, current time if undefined
+  """
+
+  if timestamp is None:
+    timestamp = time.time()
+
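+  # 86400 seconds per day; offsetting by the timezone counts whole days in local time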
+  return int((timestamp - TIMEZONE_OFFSET) / 86400)
+
+
+def load_log_messages():
+  """
+  Fetches a mapping of common log messages to their runlevels from the config.
+  """
+
+  global COMMON_LOG_MESSAGES
+  arm_config = conf.get_config("arm")
+
+  COMMON_LOG_MESSAGES = {}
+
+  for conf_key in arm_config.keys():
+    if conf_key.startswith("dedup."):
+      event_type = conf_key[6:].upper()  # strips the "dedup." prefix
+      messages = arm_config.get(conf_key, [])
+      COMMON_LOG_MESSAGES[event_type] = messages
+
+
+def get_log_file_entries(runlevels, read_limit = None, add_limit = None):
+  """
+  Parses tor's log file for past events matching the given runlevels, providing
+  a list of log entries (ordered newest to oldest). Limiting the number of read
+  entries is suggested to avoid parsing everything from logs in the GB and TB
+  range.
+
+  Arguments:
+    runlevels - event types (DEBUG - ERR) to be returned
+    read_limit - max lines of the log file that'll be read (unlimited if None)
+    add_limit  - maximum entries to provide back (unlimited if None)
+  """
+
+  start_time = time.time()
+
+  if not runlevels:
+    return []
+
+  # checks tor's configuration for the log file's location (if any exists)
+
+  logging_types, logging_location = None, None
+
+  for logging_entry in tor_tools.get_conn().get_option("Log", [], True):
+    # looks for an entry like: notice file /var/log/tor/notices.log
+
+    entry_comp = logging_entry.split()
+
+    if entry_comp[1] == "file":
+      logging_types, logging_location = entry_comp[0], entry_comp[2]
+      break
+
+  if not logging_location:
+    return []
+
+  # includes the prefix for tor paths
+
+  logging_location = CONFIG['tor.chroot'] + logging_location
+
+  # if the runlevels argument is a superset of the log file then we can
+  # limit the read contents to the add_limit
+
+  runlevels = list(log.Runlevel)
+  logging_types = logging_types.upper()
+
+  if add_limit and (not read_limit or read_limit > add_limit):
+    if "-" in logging_types:
+      div_index = logging_types.find("-")
+      start_index = runlevels.index(logging_types[:div_index])
+      end_index = runlevels.index(logging_types[div_index + 1:])
+      log_file_run_levels = runlevels[start_index:end_index + 1]
+    else:
+      start_index = runlevels.index(logging_types)
+      log_file_run_levels = runlevels[start_index:]
+
+    # checks if runlevels we're reporting are a superset of the file's contents
+
+    is_file_subset = True
+
+    for runlevel_type in log_file_run_levels:
+      if runlevel_type not in runlevels:
+        is_file_subset = False
+        break
+
+    if is_file_subset:
+      read_limit = add_limit
+
+  # tries opening the log file, cropping results to avoid choking on huge logs
+
+  lines = []
+
+  try:
+    if read_limit:
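+      # stem's system.call() runs the command and hands back its output as a list of lines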
+      lines = system.call("tail -n %i %s" % (read_limit, logging_location))
+
+      if not lines:
+        raise IOError()
+    else:
+      log_file = open(logging_location, "r")
+      lines = log_file.readlines()
+      log_file.close()
+  except IOError:
+    log.warn("Unable to read tor's log file: %s" % logging_location)
+
+  if not lines:
+    return []
+
+  logged_events = []
+  current_unix_time, current_local_time = time.time(), time.localtime()
+
+  for i in range(len(lines) - 1, -1, -1):
+    line = lines[i]
+
+    # entries look like:
+    # Jul 15 18:29:48.806 [notice] Parsing GEOIP file.
+
+    line_comp = line.split()
+
+    # Checks that we have all the components we expect. This could happen if
+    # we're either not parsing a tor log or in weird edge cases (like being
+    # out of disk space)
+
+    if len(line_comp) < 4:
+      continue
+
+    event_type = line_comp[3][1:-1].upper()
+
+    if event_type in runlevels:
+      # converts timestamp to unix time
+
+      timestamp = " ".join(line_comp[:3])
+
+      # strips the decimal seconds
+
+      if "." in timestamp:
+        timestamp = timestamp[:timestamp.find(".")]
+
+      # Ignoring wday and yday since they aren't used.
+      #
+      # Pretend the year is 2012, because 2012 is a leap year, and parsing a
+      # date with strptime fails if Feb 29th is passed without a year that's
+      # actually a leap year. We can't just use the current year, because we
+      # might be parsing old logs which didn't get rotated.
+      #
+      # https://trac.torproject.org/projects/tor/ticket/5265
+
+      timestamp = "2012 " + timestamp
+      event_time_comp = list(time.strptime(timestamp, "%Y %b %d %H:%M:%S"))
+      event_time_comp[8] = current_local_time.tm_isdst
+      event_time = time.mktime(event_time_comp)  # converts local to unix time
+
+      # The above is gonna be wrong if the logs are for the previous year. If
+      # the event's in the future then correct for this.
+
+      if event_time > current_unix_time + 60:
+        event_time_comp[0] -= 1
+        event_time = time.mktime(event_time_comp)
+
+      event_msg = " ".join(line_comp[4:])
+      logged_events.append(LogEntry(event_time, event_type, event_msg, RUNLEVEL_EVENT_COLOR[event_type]))
+
+    if "opening log file" in line:
+      break  # this entry marks the start of this tor instance
+
+  if add_limit:
+    logged_events = logged_events[:add_limit]
+
+  log.info("Read %i entries from tor's log file: %s (read limit: %i, runtime: %0.3f)" % (len(logged_events), logging_location, read_limit, time.time() - start_time))
+
+  return logged_events
+
+
+def get_daybreaks(events, ignore_time_for_cache = False):
+  """
+  Provides the input events back with special 'DAYBREAK_EVENT' markers inserted
+  whenever the date changed between log entries (or since the most recent
+  event). The timestamp matches the beginning of the day for the following
+  entry.
+
+  Arguments:
+    events                - chronologically ordered listing of events
+    ignore_time_for_cache - skips taking the day into consideration for
+                            providing cached results if true
+  """
+
+  global CACHED_DAYBREAKS_ARGUMENTS, CACHED_DAYBREAKS_RESULT
+
+  if not events:
+    return []
+
+  new_listing = []
+  current_day = days_since()
+  last_day = current_day
+
+  if CACHED_DAYBREAKS_ARGUMENTS[0] == events and \
+    (ignore_time_for_cache or CACHED_DAYBREAKS_ARGUMENTS[1] == current_day):
+    return list(CACHED_DAYBREAKS_RESULT)
+
+  for entry in events:
+    event_day = days_since(entry.timestamp)
+
+    if event_day != last_day:
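+      # timestamp for local midnight at the start of the entry's day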
+      marker_timestamp = (event_day * 86400) + TIMEZONE_OFFSET
+      new_listing.append(LogEntry(marker_timestamp, DAYBREAK_EVENT, "", "white"))
+
+    new_listing.append(entry)
+    last_day = event_day
+
+  CACHED_DAYBREAKS_ARGUMENTS = (list(events), current_day)
+  CACHED_DAYBREAKS_RESULT = list(new_listing)
+
+  return new_listing
+
+
+def get_duplicates(events):
+  """
+  Deduplicates a list of log entries, providing back a tuple listing with the
+  log entry and count of duplicates following it. Entries in different days are
+  not considered to be duplicates. This times out, returning None if it takes
+  longer than DEDUPLICATION_TIMEOUT.
+
+  Arguments:
+    events - chronologically ordered listing of events
+  """
+
+  global CACHED_DUPLICATES_ARGUMENTS, CACHED_DUPLICATES_RESULT
+
+  if CACHED_DUPLICATES_ARGUMENTS == events:
+    return list(CACHED_DUPLICATES_RESULT)
+
+  # loads common log entries from the config if they haven't been
+
+  if COMMON_LOG_MESSAGES is None:
+    load_log_messages()
+
+  start_time = time.time()
+  events_remaining = list(events)
+  return_events = []
+
+  while events_remaining:
+    entry = events_remaining.pop(0)
+    duplicate_indices = is_duplicate(entry, events_remaining, True)
+
+    # checks if the call timeout has been reached
+
+    if (time.time() - start_time) > DEDUPLICATION_TIMEOUT / 1000.0:
+      return None
+
+    # drops duplicate entries
+
+    duplicate_indices.reverse()
+
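+    # indices were reversed above so deletions don't shift the entries still to be removed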
+    for i in duplicate_indices:
+      del events_remaining[i]
+
+    return_events.append((entry, len(duplicate_indices)))
+
+  CACHED_DUPLICATES_ARGUMENTS = list(events)
+  CACHED_DUPLICATES_RESULT = list(return_events)
+
+  return return_events
+
+
+def is_duplicate(event, event_set, get_duplicates = False):
+  """
+  True if the event is a duplicate for something in the event_set, false
+  otherwise. If the get_duplicates flag is set this provides the indices of
+  the duplicates instead.
+
+  Arguments:
+    event          - event to search for duplicates of
+    event_set      - set to look for the event in
+    get_duplicates - instead of providing back a boolean this gives a list of
+                     the duplicate indices in the event_set
+  """
+
+  duplicate_indices = []
+
+  for i in range(len(event_set)):
+    forward_entry = event_set[i]
+
+    # if showing dates then do duplicate detection for each day, rather
+    # than globally
+
+    if forward_entry.type == DAYBREAK_EVENT:
+      break
+
+    if event.type == forward_entry.type:
+      is_duplicate = False
+
+      if event.msg == forward_entry.msg:
+        is_duplicate = True
+      elif event.type in COMMON_LOG_MESSAGES:
+        for common_msg in COMMON_LOG_MESSAGES[event.type]:
+          # if it starts with an asterisk then check the whole message rather
+          # than just the start
+
+          if common_msg[0] == "*":
+            is_duplicate = common_msg[1:] in event.msg and common_msg[1:] in forward_entry.msg
+          else:
+            is_duplicate = event.msg.startswith(common_msg) and forward_entry.msg.startswith(common_msg)
+
+          if is_duplicate:
+            break
+
+      if is_duplicate:
+        if get_duplicates:
+          duplicate_indices.append(i)
+        else:
+          return True
+
+  if get_duplicates:
+    return duplicate_indices
+  else:
+    return False
+
+
+class LogEntry():
+  """
+  Individual log file entry, having the following attributes:
+    timestamp - unix timestamp for when the event occurred
+    event_type - event type that occurred ("INFO", "BW", "ARM_WARN", etc)
+    msg       - message that was logged
+    color     - color of the log entry
+  """
+
+  def __init__(self, timestamp, event_type, msg, color):
+    self.timestamp = timestamp
+    self.type = event_type
+    self.msg = msg
+    self.color = color
+    self._display_message = None
+
+  def get_display_message(self, include_date = False):
+    """
+    Provides the entry's message for the log.
+
+    Arguments:
+      include_date - appends the event's date to the start of the message
+    """
+
+    if include_date:
+      # not the common case so skip caching
+      entry_time = time.localtime(self.timestamp)
+      time_label = "%i/%i/%i %02i:%02i:%02i" % (entry_time[1], entry_time[2], entry_time[0], entry_time[3], entry_time[4], entry_time[5])
+      return "%s [%s] %s" % (time_label, self.type, self.msg)
+
+    if not self._display_message:
+      entry_time = time.localtime(self.timestamp)
+      self._display_message = "%02i:%02i:%02i [%s] %s" % (entry_time[3], entry_time[4], entry_time[5], self.type, self.msg)
+
+    return self._display_message
+
+
+class LogPanel(panel.Panel, threading.Thread, logging.Handler):
+  """
+  Listens for and displays tor, arm, and stem events. This can prepopulate
+  from tor's log file if it exists.
+  """
+
+  def __init__(self, stdscr, logged_events):
+    panel.Panel.__init__(self, stdscr, "log", 0)
+    logging.Handler.__init__(self, level = log.logging_level(log.DEBUG))
+
+    self.setFormatter(logging.Formatter(
+      fmt = '%(asctime)s [%(levelname)s] %(message)s',
+      datefmt = '%m/%d/%Y %H:%M:%S'),
+    )
+
+    threading.Thread.__init__(self)
+    self.setDaemon(True)
+
+    # Make sure that the msg.* messages are loaded. Lazy loading it later is
+    # fine, but this way we're sure it happens before warning about unused
+    # config options.
+
+    load_log_messages()
+
+    # regex filters the user has defined
+
+    self.filter_options = []
+
+    for filter in CONFIG["features.log.regex"]:
+      # stops if we've already hit the maximum number of filters
+
+      if len(self.filter_options) >= MAX_REGEX_FILTERS:
+        break
+
+      try:
+        re.compile(filter)
+        self.filter_options.append(filter)
+      except re.error as exc:
+        log.notice("Invalid regular expression pattern (%s): %s" % (exc, filter))
+
+    self.logged_events = []  # needs to be set before we receive any events
+
+    # restricts the input to the set of events we can listen to, and
+    # configures the controller to listen to them
+
+    self.logged_events = self.set_event_listening(logged_events)
+
+    self.set_pause_attr("msg_log")       # tracks the message log when we're paused
+    self.msg_log = []                    # log entries, sorted by the timestamp
+    self.regex_filter = None             # filter for presented log events (no filtering if None)
+    self.last_content_height = 0         # height of the rendered content when last drawn
+    self.log_file = None                 # file log messages are saved to (skipped if None)
+    self.scroll = 0
+
+    self._last_update = -1               # time the content was last revised
+    self._halt = False                   # terminates thread if true
+    self._cond = threading.Condition()   # used for pausing/resuming the thread
+
+    # restricts concurrent write access to attributes used to draw the display
+    # and pausing:
+    # msg_log, logged_events, regex_filter, scroll
+
+    self.vals_lock = threading.RLock()
+
+    # cached parameters (invalidated if arguments for them change)
+    # last set of events we've drawn with
+
+    self._last_logged_events = []
+
+    # _get_title (args: logged_events, regex_filter pattern, width)
+
+    self._title_cache = None
+    self._title_args = (None, None, None)
+
+    self.reprepopulate_events()
+
+    # leaving last_content_height too low causes initialization problems
+
+    self.last_content_height = len(self.msg_log)
+
+    # adds listeners for tor and stem events
+
+    conn = tor_tools.get_conn()
+    conn.add_status_listener(self._reset_listener)
+
+    # opens log file if we'll be saving entries
+
+    if CONFIG["features.log_file"]:
+      log_path = CONFIG["features.log_file"]
+
+      try:
+        # make dir if the path doesn't already exist
+
+        base_dir = os.path.dirname(log_path)
+
+        if not os.path.exists(base_dir):
+          os.makedirs(base_dir)
+
+        self.log_file = open(log_path, "a")
+        log.notice("arm %s opening log file (%s)" % (__version__, log_path))
+      except IOError as exc:
+        log.error("Unable to write to log file: %s" % exc.strerror)
+        self.log_file = None
+      except OSError as exc:
+        log.error("Unable to write to log file: %s" % exc)
+        self.log_file = None
+
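+    # subscribes to stem's logger so arm and stem runlevel messages reach our emit() handler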
+    stem_logger = log.get_logger()
+    stem_logger.addHandler(self)
+
+  def emit(self, record):
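+    # python's logging module reports WARNING where tor and arm runlevels use WARN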
+    if record.levelname == "WARNING":
+      record.levelname = "WARN"
+
+    event_color = RUNLEVEL_EVENT_COLOR[record.levelname]
+    self.register_event(LogEntry(int(record.created), "ARM_%s" % record.levelname, record.msg, event_color))
+
+  def reprepopulate_events(self):
+    """
+    Clears the event log and repopulates it from the arm and tor backlogs.
+    """
+
+    self.vals_lock.acquire()
+
+    # clears the event log
+
+    self.msg_log = []
+
+    # fetches past tor events from log file, if available
+
+    if CONFIG["features.log.prepopulate"]:
+      set_runlevels = list(set.intersection(set(self.logged_events), set(list(log.Runlevel))))
+      read_limit = CONFIG["features.log.prepopulateReadLimit"]
+      add_limit = CONFIG["cache.log_panel.size"]
+
+      for entry in get_log_file_entries(set_runlevels, read_limit, add_limit):
+        self.msg_log.append(entry)
+
+    # crops events that are either too old, or more numerous than the caching size
+
+    self._trim_events(self.msg_log)
+
+    self.vals_lock.release()
+
+  def set_duplicate_visability(self, is_visible):
+    """
+    Sets if duplicate log entries are collapsed or expanded.
+
+    Arguments:
+      is_visible - if true all log entries are shown, otherwise they're
+                   deduplicated
+    """
+
+    arm_config = conf.get_config("arm")
+    arm_config.set("features.log.showDuplicateEntries", str(is_visible))
+
+  def register_tor_event(self, event):
+    """
+    Translates a stem.response.event.Event instance into a LogEvent, and calls
+    register_event().
+    """
+
+    msg, color = ' '.join(str(event).split(' ')[1:]), "white"
+
+    if isinstance(event, events.CircuitEvent):
+      color = "yellow"
+    elif isinstance(event, events.BandwidthEvent):
+      color = "cyan"
+      msg = "READ: %i, WRITTEN: %i" % (event.read, event.written)
+    elif isinstance(event, events.LogEvent):
+      color = RUNLEVEL_EVENT_COLOR[event.runlevel]
+      msg = event.message
+    elif isinstance(event, events.NetworkStatusEvent):
+      color = "blue"
+    elif isinstance(event, events.NewConsensusEvent):
+      color = "magenta"
+    elif isinstance(event, events.GuardEvent):
+      color = "yellow"
+    elif not event.type in arm.arguments.TOR_EVENT_TYPES.values():
+      color = "red"  # unknown event type
+
+    self.register_event(LogEntry(event.arrived_at, event.type, msg, color))
+
+  def register_event(self, event):
+    """
+    Notes event and redraws log. If paused it's held in a temporary buffer.
+
+    Arguments:
+      event - LogEntry for the event that occurred
+    """
+
+    if not event.type in self.logged_events:
+      return
+
+    # strips control characters to avoid screwing up the terminal
+
+    event.msg = ui_tools.get_printable(event.msg)
+
+    # note event in the log file if we're saving them
+
+    if self.log_file:
+      try:
+        self.log_file.write(event.get_display_message(True) + "\n")
+        self.log_file.flush()
+      except IOError as exc:
+        log.error("Unable to write to log file: %s" % exc.strerror)
+        self.log_file = None
+
+    self.vals_lock.acquire()
+    self.msg_log.insert(0, event)
+    self._trim_events(self.msg_log)
+
+    # notifies the display that it has new content
+
+    if not self.regex_filter or self.regex_filter.search(event.get_display_message()):
+      self._cond.acquire()
+      self._cond.notifyAll()
+      self._cond.release()
+
+    self.vals_lock.release()
+
+  def set_logged_events(self, event_types):
+    """
+    Sets the event types recognized by the panel.
+
+    Arguments:
+      event_types - event types to be logged
+    """
+
+    if event_types == self.logged_events:
+      return
+
+    self.vals_lock.acquire()
+
+    # configures the controller to listen for these tor events, and provides
+    # back a subset without anything we're failing to listen to
+
+    set_types = self.set_event_listening(event_types)
+    self.logged_events = set_types
+    self.redraw(True)
+    self.vals_lock.release()
+
+  def get_filter(self):
+    """
+    Provides our currently selected regex filter.
+    """
+
+    return self.filter_options[0] if self.regex_filter else None
+
+  def set_filter(self, log_filter):
+    """
+    Filters log entries according to the given regular expression.
+
+    Arguments:
+      log_filter - regular expression used to determine which messages are
+                  shown, None if no filter should be applied
+    """
+
+    if log_filter == self.regex_filter:
+      return
+
+    self.vals_lock.acquire()
+    self.regex_filter = log_filter
+    self.redraw(True)
+    self.vals_lock.release()
+
+  def make_filter_selection(self, selected_option):
+    """
+    Makes the given filter selection, applying it to the log and reorganizing
+    our filter selection.
+
+    Arguments:
+      selected_option - regex filter we've already added, None if no filter
+                       should be applied
+    """
+
+    if selected_option:
+      try:
+        self.set_filter(re.compile(selected_option))
+
+        # move selection to top
+
+        self.filter_options.remove(selected_option)
+        self.filter_options.insert(0, selected_option)
+      except re.error as exc:
+        # shouldn't happen since we've already checked validity
+
+        log.warn("Invalid regular expression ('%s': %s) - removing from listing" % (selected_option, exc))
+        self.filter_options.remove(selected_option)
+    else:
+      self.set_filter(None)
+
+  def show_filter_prompt(self):
+    """
+    Prompts the user to add a new regex filter.
+    """
+
+    regex_input = arm.popups.input_prompt("Regular expression: ")
+
+    if regex_input:
+      try:
+        self.set_filter(re.compile(regex_input))
+
+        if regex_input in self.filter_options:
+          self.filter_options.remove(regex_input)
+
+        self.filter_options.insert(0, regex_input)
+      except re.error as exc:
+        arm.popups.show_msg("Unable to compile expression: %s" % exc, 2)
+
+  def show_event_selection_prompt(self):
+    """
+    Prompts the user to select the events being listened for.
+    """
+
+    # allow user to enter new types of events to log - unchanged if left blank
+
+    popup, width, height = arm.popups.init(11, 80)
+
+    if popup:
+      try:
+        # displays the available flags
+
+        popup.win.box()
+        popup.addstr(0, 0, "Event Types:", curses.A_STANDOUT)
+        event_lines = CONFIG['msg.misc.event_types'].split("\n")
+
+        for i in range(len(event_lines)):
+          popup.addstr(i + 1, 1, event_lines[i][6:])
+
+        popup.win.refresh()
+
+        user_input = arm.popups.input_prompt("Events to log: ")
+
+        if user_input:
+          user_input = user_input.replace(' ', '')  # strips spaces
+
+          try:
+            self.set_logged_events(arm.arguments.expand_events(user_input))
+          except ValueError as exc:
+            arm.popups.show_msg("Invalid flags: %s" % str(exc), 2)
+      finally:
+        arm.popups.finalize()
+
+  def show_snapshot_prompt(self):
+    """
+    Lets user enter a path to take a snapshot, canceling if left blank.
+    """
+
+    path_input = arm.popups.input_prompt("Path to save log snapshot: ")
+
+    if path_input:
+      try:
+        self.save_snapshot(path_input)
+        arm.popups.show_msg("Saved: %s" % path_input, 2)
+      except IOError as exc:
+        arm.popups.show_msg("Unable to save snapshot: %s" % exc.strerror, 2)
+
+  def clear(self):
+    """
+    Clears the contents of the event log.
+    """
+
+    self.vals_lock.acquire()
+    self.msg_log = []
+    self.redraw(True)
+    self.vals_lock.release()
+
+  def save_snapshot(self, path):
+    """
+    Saves the log events currently being displayed to the given path. This
+    takes filters into account. This overwrites the file if it already exists,
+    and raises an IOError if there's a problem.
+
+    Arguments:
+      path - path where to save the log snapshot
+    """
+
+    path = os.path.abspath(os.path.expanduser(path))
+
+    # make dir if the path doesn't already exist
+
+    base_dir = os.path.dirname(path)
+
+    try:
+      if not os.path.exists(base_dir):
+        os.makedirs(base_dir)
+    except OSError as exc:
+      raise IOError("unable to make directory '%s'" % base_dir)
+
+    snapshot_file = open(path, "w")
+    self.vals_lock.acquire()
+
+    try:
+      for entry in self.msg_log:
+        is_visible = not self.regex_filter or self.regex_filter.search(entry.get_display_message())
+
+        if is_visible:
+          snapshot_file.write(entry.get_display_message(True) + "\n")
+
+      self.vals_lock.release()
+    except Exception as exc:
+      self.vals_lock.release()
+      raise exc
+
+  def handle_key(self, key):
+    is_keystroke_consumed = True
+
+    if ui_tools.is_scroll_key(key):
+      page_height = self.get_preferred_size()[0] - 1
+      new_scroll = ui_tools.get_scroll_position(key, self.scroll, page_height, self.last_content_height)
+
+      if self.scroll != new_scroll:
+        self.vals_lock.acquire()
+        self.scroll = new_scroll
+        self.redraw(True)
+        self.vals_lock.release()
+    elif key in (ord('u'), ord('U')):
+      self.vals_lock.acquire()
+      self.set_duplicate_visability(not CONFIG["features.log.showDuplicateEntries"])
+      self.redraw(True)
+      self.vals_lock.release()
+    elif key == ord('c') or key == ord('C'):
+      msg = "This will clear the log. Are you sure (c again to confirm)?"
+      key_press = arm.popups.show_msg(msg, attr = curses.A_BOLD)
+
+      if key_press in (ord('c'), ord('C')):
+        self.clear()
+    elif key == ord('f') or key == ord('F'):
+      # Provides menu to pick regular expression filters or adding new ones:
+      # for syntax see: http://docs.python.org/library/re.html#regular-expression-syntax
+
+      options = ["None"] + self.filter_options + ["New..."]
+      old_selection = 0 if not self.regex_filter else 1
+
+      # does all activity under a curses lock to prevent redraws when adding
+      # new filters
+
+      panel.CURSES_LOCK.acquire()
+
+      try:
+        selection = arm.popups.show_menu("Log Filter:", options, old_selection)
+
+        # applies new setting
+
+        if selection == 0:
+          self.set_filter(None)
+        elif selection == len(options) - 1:
+          # selected 'New...' option - prompt user to input regular expression
+          self.show_filter_prompt()
+        elif selection != -1:
+          self.make_filter_selection(self.filter_options[selection - 1])
+      finally:
+        panel.CURSES_LOCK.release()
+
+      if len(self.filter_options) > MAX_REGEX_FILTERS:
+        del self.filter_options[MAX_REGEX_FILTERS:]
+    elif key == ord('e') or key == ord('E'):
+      self.show_event_selection_prompt()
+    elif key == ord('a') or key == ord('A'):
+      self.show_snapshot_prompt()
+    else:
+      is_keystroke_consumed = False
+
+    return is_keystroke_consumed
+
+  def get_help(self):
+    options = []
+    options.append(("up arrow", "scroll log up a line", None))
+    options.append(("down arrow", "scroll log down a line", None))
+    options.append(("a", "save snapshot of the log", None))
+    options.append(("e", "change logged events", None))
+    options.append(("f", "log regex filter", "enabled" if self.regex_filter else "disabled"))
+    options.append(("u", "duplicate log entries", "visible" if CONFIG["features.log.showDuplicateEntries"] else "hidden"))
+    options.append(("c", "clear event log", None))
+    return options
+
+  def draw(self, width, height):
+    """
+    Redraws message log. Entries stretch to use available space and may
+    contain up to two lines. Starts with newest entries.
+    """
+
+    current_log = self.get_attr("msg_log")
+
+    self.vals_lock.acquire()
+    self._last_logged_events, self._last_update = list(current_log), time.time()
+
+    # draws the top label
+
+    if self.is_title_visible():
+      self.addstr(0, 0, self._get_title(width), curses.A_STANDOUT)
+
+    # restricts scroll location to valid bounds
+
+    self.scroll = max(0, min(self.scroll, self.last_content_height - height + 1))
+
+    # draws left-hand scroll bar if content's longer than the height
+
+    msg_indent, divider_indent = 1, 0  # offsets for scroll bar
+    is_scroll_bar_visible = self.last_content_height > height - 1
+
+    if is_scroll_bar_visible:
+      msg_indent, divider_indent = 3, 2
+      self.add_scroll_bar(self.scroll, self.scroll + height - 1, self.last_content_height, 1)
+
+    # draws log entries
+
+    line_count = 1 - self.scroll
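+    # line_count is relative to the scroll offset, so values below one are above the visible area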
+    seen_first_date_divider = False
+    divider_attr, duplicate_attr = curses.A_BOLD | ui_tools.get_color("yellow"), curses.A_BOLD | ui_tools.get_color("green")
+
+    is_dates_shown = self.regex_filter is None and CONFIG["features.log.showDateDividers"]
+    event_log = get_daybreaks(current_log, self.is_paused()) if is_dates_shown else list(current_log)
+
+    if not CONFIG["features.log.showDuplicateEntries"]:
+      deduplicated_log = get_duplicates(event_log)
+
+      if deduplicated_log is None:
+        log.warn("Deduplication took too long. Its current implementation has difficulty handling large logs so disabling it to keep the interface responsive.")
+        self.set_duplicate_visability(True)
+        deduplicated_log = [(entry, 0) for entry in event_log]
+    else:
+      deduplicated_log = [(entry, 0) for entry in event_log]
+
+    # determines if we have the minimum width to show date dividers
+
+    show_daybreaks = width - divider_indent >= 3
+
+    while deduplicated_log:
+      entry, duplicate_count = deduplicated_log.pop(0)
+
+      if self.regex_filter and not self.regex_filter.search(entry.get_display_message()):
+        continue  # filter doesn't match log message - skip
+
+      # checks if we should be showing a divider with the date
+
+      if entry.type == DAYBREAK_EVENT:
+        # bottom of the divider
+
+        if seen_first_date_divider:
+          if line_count >= 1 and line_count < height and show_daybreaks:
+            self.addch(line_count, divider_indent, curses.ACS_LLCORNER, divider_attr)
+            self.hline(line_count, divider_indent + 1, width - divider_indent - 2, divider_attr)
+            self.addch(line_count, width - 1, curses.ACS_LRCORNER, divider_attr)
+
+          line_count += 1
+
+        # top of the divider
+
+        if line_count >= 1 and line_count < height and show_daybreaks:
+          time_label = time.strftime(" %B %d, %Y ", time.localtime(entry.timestamp))
+          self.addch(line_count, divider_indent, curses.ACS_ULCORNER, divider_attr)
+          self.addch(line_count, divider_indent + 1, curses.ACS_HLINE, divider_attr)
+          self.addstr(line_count, divider_indent + 2, time_label, curses.A_BOLD | divider_attr)
+
+          line_length = width - divider_indent - len(time_label) - 3
+          self.hline(line_count, divider_indent + len(time_label) + 2, line_length, divider_attr)
+          self.addch(line_count, divider_indent + len(time_label) + 2 + line_length, curses.ACS_URCORNER, divider_attr)
+
+        seen_first_date_divider = True
+        line_count += 1
+      else:
+        # entry contents to be displayed, tuples of the form:
+        # (msg, formatting, include_break)
+
+        display_queue = []
+
+        msg_comp = entry.get_display_message().split("\n")
+
+        for i in range(len(msg_comp)):
+          font = curses.A_BOLD if "ERR" in entry.type else curses.A_NORMAL  # emphasizes ERR messages
+          display_queue.append((msg_comp[i].strip(), font | ui_tools.get_color(entry.color), i != len(msg_comp) - 1))
+
+        if duplicate_count:
+          plural_label = "s" if duplicate_count > 1 else ""
+          duplicate_msg = DUPLICATE_MSG % (duplicate_count, plural_label)
+          display_queue.append((duplicate_msg, duplicate_attr, False))
+
+        cursor_location, line_offset = msg_indent, 0
+        max_entries_per_line = CONFIG["features.log.max_lines_per_entry"]
+
+        while display_queue:
+          msg, format, include_break = display_queue.pop(0)
+          draw_line = line_count + line_offset
+
+          if line_offset == max_entries_per_line:
+            break
+
+          max_msg_size = width - cursor_location - 1
+
+          if len(msg) > max_msg_size:
+            # message is too long - break it up
+            if line_offset == max_entries_per_line - 1:
+              msg = ui_tools.crop_str(msg, max_msg_size)
+            else:
+              msg, remainder = ui_tools.crop_str(msg, max_msg_size, 4, 4, ui_tools.Ending.HYPHEN, True)
+              display_queue.insert(0, (remainder.strip(), format, include_break))
+
+            include_break = True
+
+          if draw_line < height and draw_line >= 1:
+            if seen_first_date_divider and width - divider_indent >= 3 and show_daybreaks:
+              self.addch(draw_line, divider_indent, curses.ACS_VLINE, divider_attr)
+              self.addch(draw_line, width - 1, curses.ACS_VLINE, divider_attr)
+
+            self.addstr(draw_line, cursor_location, msg, format)
+
+          cursor_location += len(msg)
+
+          if include_break or not display_queue:
+            line_offset += 1
+            cursor_location = msg_indent + ENTRY_INDENT
+
+        line_count += line_offset
+
+      # if this is the last line and there's room, then draw the bottom of the divider
+
+      if not deduplicated_log and seen_first_date_divider:
+        if line_count < height and show_daybreaks:
+          self.addch(line_count, divider_indent, curses.ACS_LLCORNER, divider_attr)
+          self.hline(line_count, divider_indent + 1, width - divider_indent - 2, divider_attr)
+          self.addch(line_count, width - 1, curses.ACS_LRCORNER, divider_attr)
+
+        line_count += 1
+
+    # redraw the display if...
+    # - last_content_height was off by too much
+    # - we're off the bottom of the page
+
+    new_content_height = line_count + self.scroll - 1
+    content_height_delta = abs(self.last_content_height - new_content_height)
+    force_redraw, force_redraw_reason = True, ""
+
+    if content_height_delta >= CONTENT_HEIGHT_REDRAW_THRESHOLD:
+      force_redraw_reason = "estimate was off by %i" % content_height_delta
+    elif new_content_height > height and self.scroll + height - 1 > new_content_height:
+      force_redraw_reason = "scrolled off the bottom of the page"
+    elif not is_scroll_bar_visible and new_content_height > height - 1:
+      force_redraw_reason = "scroll bar wasn't previously visible"
+    elif is_scroll_bar_visible and new_content_height <= height - 1:
+      force_redraw_reason = "scroll bar shouldn't be visible"
+    else:
+      force_redraw = False
+
+    self.last_content_height = new_content_height
+
+    if force_redraw:
+      log.debug("redrawing the log panel with the corrected content height (%s)" % force_redraw_reason)
+      self.redraw(True)
+
+    self.vals_lock.release()
+
+  def redraw(self, force_redraw=False, block=False):
+    # determines if the content needs to be redrawn or not
+    panel.Panel.redraw(self, force_redraw, block)
+
+  def run(self):
+    """
+    Redraws the display, coalescing updates if events are rapidly logged (for
+    instance running at the DEBUG runlevel) while also being immediately
+    responsive if additions are less frequent.
+    """
+
+    last_day = days_since()  # used to determine if the date has changed
+
+    while not self._halt:
+      current_day = days_since()
+      time_since_reset = time.time() - self._last_update
+      max_log_update_rate = CONFIG["features.log.maxRefreshRate"] / 1000.0
+
+      sleep_time = 0
+
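+      # idle poll when nothing changed or we're paused, otherwise throttle to the configured refresh rate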
+      if (self.msg_log == self._last_logged_events and last_day == current_day) or self.is_paused():
+        sleep_time = 5
+      elif time_since_reset < max_log_update_rate:
+        sleep_time = max(0.05, max_log_update_rate - time_since_reset)
+
+      if sleep_time:
+        self._cond.acquire()
+
+        if not self._halt:
+          self._cond.wait(sleep_time)
+
+        self._cond.release()
+      else:
+        last_day = current_day
+        self.redraw(True)
+
+        # makes sure that we register this as an update, otherwise lacking the
+        # curses lock can cause a busy wait here
+
+        self._last_update = time.time()
+
+  def stop(self):
+    """
+    Halts further resolutions and terminates the thread.
+    """
+
+    self._cond.acquire()
+    self._halt = True
+    self._cond.notifyAll()
+    self._cond.release()
+
+  def set_event_listening(self, events):
+    """
+    Configures the events Tor listens for, filtering non-tor events from what we
+    request from the controller. This returns a sorted list of the events we
+    successfully set.
+
+    Arguments:
+      events - event types to attempt to set
+    """
+
+    events = set(events)  # drops duplicates
+
+    # accounts for runlevel naming difference
+
+    if "ERROR" in events:
+      events.add("ERR")
+      events.remove("ERROR")
+
+    if "WARNING" in events:
+      events.add("WARN")
+      events.remove("WARNING")
+
+    tor_events = events.intersection(set(arm.arguments.TOR_EVENT_TYPES.values()))
+    arm_events = events.intersection(set(["ARM_%s" % runlevel for runlevel in log.Runlevel.keys()]))
+
+    # adds events unrecognized by arm if we're listening to the 'UNKNOWN' type
+
+    if "UNKNOWN" in events:
+      tor_events.update(set(arm.arguments.missing_event_types()))
+
+    tor_conn = tor_tools.get_conn()
+    tor_conn.remove_event_listener(self.register_tor_event)
+
+    for event_type in list(tor_events):
+      try:
+        tor_conn.add_event_listener(self.register_tor_event, event_type)
+      except stem.ProtocolError:
+        tor_events.remove(event_type)
+
+    # provides back the input set minus events we failed to set
+
+    return sorted(tor_events.union(arm_events))
+
+  def _reset_listener(self, controller, event_type, _):
+    # if we're attaching to a new tor instance then clears the log and
+    # prepopulates it with the content belonging to this instance
+
+    if event_type == State.INIT:
+      self.reprepopulate_events()
+      self.redraw(True)
+    elif event_type == State.CLOSED:
+      log.notice("Tor control port closed")
+
+  def _get_title(self, width):
+    """
+    Provides the label used for the panel, looking like:
+      Events (ARM NOTICE - ERR, BW - filter: prepopulate):
+
+    This truncates the attributes (with an ellipsis) if too long, and condenses
+    runlevel ranges if there's three or more in a row (for instance ARM_INFO,
+    ARM_NOTICE, and ARM_WARN becomes "ARM_INFO - WARN").
+
+    Arguments:
+      width - width constraint the label needs to fit in
+    """
+
+    # usually the attributes used to make the label are decently static, so
+    # provide cached results if they're unchanged
+
+    self.vals_lock.acquire()
+    current_pattern = self.regex_filter.pattern if self.regex_filter else None
+    is_unchanged = self._title_args[0] == self.logged_events
+    is_unchanged &= self._title_args[1] == current_pattern
+    is_unchanged &= self._title_args[2] == width
+
+    if is_unchanged:
+      self.vals_lock.release()
+      return self._title_cache
+
+    events_list = list(self.logged_events)
+
+    if not events_list:
+      if not current_pattern:
+        panel_label = "Events:"
+      else:
+        label_pattern = ui_tools.crop_str(current_pattern, width - 18)
+        panel_label = "Events (filter: %s):" % label_pattern
+    else:
+      # does the following with all runlevel types (tor, arm, and stem):
+      # - pulls to the start of the list
+      # - condenses range if there's three or more in a row (ex. "ARM_INFO - WARN")
+      # - condense further if there's identical runlevel ranges for multiple
+      #   types (ex. "NOTICE - ERR, ARM_NOTICE - ERR" becomes "TOR/ARM NOTICE - ERR")
+
+      tmp_runlevels = []  # runlevels pulled from the list (just the runlevel part)
+      runlevel_ranges = []  # tuples of (type, start_level, end_level) for ranges to be condensed
+
+      # reverses runlevels and types so they're appended in the right order
+
+      reversed_runlevels = list(log.Runlevel)
+      reversed_runlevels.reverse()
+
+      for prefix in ("ARM_", ""):
+        # blank ending runlevel forces the break condition to be reached at the end
+        for runlevel in reversed_runlevels + [""]:
+          event_type = prefix + runlevel
+          if runlevel and event_type in events_list:
+            # runlevel event found, move to the tmp list
+            events_list.remove(event_type)
+            tmp_runlevels.append(runlevel)
+          elif tmp_runlevels:
+            # adds all tmp list entries to the start of events_list
+            if len(tmp_runlevels) >= 3:
+              # saves condensed sequential runlevels to be added later
+              runlevel_ranges.append((prefix, tmp_runlevels[-1], tmp_runlevels[0]))
+            else:
+              # adds runlevels individually
+              for tmp_runlevel in tmp_runlevels:
+                events_list.insert(0, prefix + tmp_runlevel)
+
+            tmp_runlevels = []
+
+      # adds runlevel ranges, condensing if there's identical ranges
+
+      for i in range(len(runlevel_ranges)):
+        if runlevel_ranges[i]:
+          prefix, start_level, end_level = runlevel_ranges[i]
+
+          # check for matching ranges
+
+          matches = []
+
+          for j in range(i + 1, len(runlevel_ranges)):
+            if runlevel_ranges[j] and runlevel_ranges[j][1] == start_level and runlevel_ranges[j][2] == end_level:
+              matches.append(runlevel_ranges[j])
+              runlevel_ranges[j] = None
+
+          if matches:
+            # strips underscores and replaces empty entries with "TOR"
+
+            prefixes = [entry[0] for entry in matches] + [prefix]
+
+            for k in range(len(prefixes)):
+              if prefixes[k] == "":
+                prefixes[k] = "TOR"
+              else:
+                prefixes[k] = prefixes[k].replace("_", "")
+
+            events_list.insert(0, "%s %s - %s" % ("/".join(prefixes), start_level, end_level))
+          else:
+            events_list.insert(0, "%s%s - %s" % (prefix, start_level, end_level))
+
+      # truncates the label with an ellipsis if it's too long
+
+      attr_label = ", ".join(events_list)
+
+      if current_pattern:
+        attr_label += " - filter: %s" % current_pattern
+
+      attr_label = ui_tools.crop_str(attr_label, width - 10, 1)
+
+      if attr_label:
+        attr_label = " (%s)" % attr_label
+
+      panel_label = "Events%s:" % attr_label
+
+    # cache results and return
+
+    self._title_cache = panel_label
+    self._title_args = (list(self.logged_events), current_pattern, width)
+    self.vals_lock.release()
+
+    return panel_label
+
+  def _trim_events(self, event_listing):
+    """
+    Crops events that have either:
+    - grown beyond the cache limit
+    - outlived the configured log duration
+
+    Argument:
+      event_listing - listing of log entries
+    """
+
+    cache_size = CONFIG["cache.log_panel.size"]
+
+    if len(event_listing) > cache_size:
+      del event_listing[cache_size:]
+
+    log_ttl = CONFIG["features.log.entryDuration"]
+
+    if log_ttl > 0:
+      current_day = days_since()
+
+      breakpoint = None  # index at which to crop from
+
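+      # entries are ordered newest first, so walk from the oldest end until we're within the ttl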
+      for i in range(len(event_listing) - 1, -1, -1):
+        days_since_event = current_day - days_since(event_listing[i].timestamp)
+
+        if days_since_event > log_ttl:
+          breakpoint = i  # older than the ttl
+        else:
+          break
+
+      # removes entries older than the ttl
+
+      if breakpoint is not None:
+        del event_listing[breakpoint:]
diff --git a/arm/menu/actions.py b/arm/menu/actions.py
index b11f54f..c968099 100644
--- a/arm/menu/actions.py
+++ b/arm/menu/actions.py
@@ -7,10 +7,10 @@ import functools
 import arm.popups
 import arm.controller
 import arm.menu.item
-import arm.graphing.graphPanel
+import arm.graphing.graph_panel
 import arm.util.tracker
 
-from arm.util import torTools, uiTools
+from arm.util import tor_tools, ui_tools
 
 import stem.util.connection
 
@@ -60,7 +60,7 @@ def make_actions_menu():
   """
 
   control = arm.controller.get_controller()
-  conn = torTools.get_conn()
+  conn = tor_tools.get_conn()
   header_panel = control.get_panel("header")
   actions_menu = arm.menu.item.Submenu("Actions")
   actions_menu.add(arm.menu.item.MenuItem("Close Menu", None))
@@ -103,13 +103,13 @@ def make_view_menu():
 
       view_menu.add(arm.menu.item.SelectionMenuItem(label, page_group, i))
 
-  if uiTools.is_color_supported():
+  if ui_tools.is_color_supported():
     color_menu = arm.menu.item.Submenu("Color")
-    color_group = arm.menu.item.SelectionGroup(uiTools.set_color_override, uiTools.get_color_override())
+    color_group = arm.menu.item.SelectionGroup(ui_tools.set_color_override, ui_tools.get_color_override())
 
     color_menu.add(arm.menu.item.SelectionMenuItem("All", color_group, None))
 
-    for color in uiTools.COLOR_LIST:
+    for color in ui_tools.COLOR_LIST:
       color_menu.add(arm.menu.item.SelectionMenuItem(str_tools._to_camel_case(color), color_group, color))
 
     view_menu.add(color_menu)
@@ -166,8 +166,8 @@ def make_graph_menu(graph_panel):
   interval_menu = arm.menu.item.Submenu("Interval")
   interval_group = arm.menu.item.SelectionGroup(graph_panel.set_update_interval, graph_panel.get_update_interval())
 
-  for i in range(len(arm.graphing.graphPanel.UPDATE_INTERVALS)):
-    label = arm.graphing.graphPanel.UPDATE_INTERVALS[i][0]
+  for i in range(len(arm.graphing.graph_panel.UPDATE_INTERVALS)):
+    label = arm.graphing.graph_panel.UPDATE_INTERVALS[i][0]
     label = str_tools._to_camel_case(label, divider = " ")
     interval_menu.add(arm.menu.item.SelectionMenuItem(label, interval_group, i))
 
@@ -178,7 +178,7 @@ def make_graph_menu(graph_panel):
   bounds_menu = arm.menu.item.Submenu("Bounds")
   bounds_group = arm.menu.item.SelectionGroup(graph_panel.set_bounds_type, graph_panel.get_bounds_type())
 
-  for bounds_type in arm.graphing.graphPanel.Bounds:
+  for bounds_type in arm.graphing.graph_panel.Bounds:
     bounds_menu.add(arm.menu.item.SelectionMenuItem(bounds_type, bounds_group, bounds_type))
 
   graph_menu.add(bounds_menu)
diff --git a/arm/menu/menu.py b/arm/menu/menu.py
index 99286c7..be705e5 100644
--- a/arm/menu/menu.py
+++ b/arm/menu/menu.py
@@ -9,7 +9,7 @@ import arm.controller
 import arm.menu.item
 import arm.menu.actions
 
-from arm.util import uiTools
+from arm.util import ui_tools
 
 
 class MenuCursor:
@@ -40,7 +40,7 @@ class MenuCursor:
     is_selection_submenu = isinstance(self._selection, arm.menu.item.Submenu)
     selection_hierarchy = self._selection.get_hierarchy()
 
-    if uiTools.is_selection_key(key):
+    if ui_tools.is_selection_key(key):
       if is_selection_submenu:
         if not self._selection.is_empty():
           self._selection = self._selection.get_children()[0]
@@ -96,7 +96,7 @@ def show_menu():
       # sets the background color
 
       popup.win.clear()
-      popup.win.bkgd(' ', curses.A_STANDOUT | uiTools.get_color("red"))
+      popup.win.bkgd(' ', curses.A_STANDOUT | ui_tools.get_color("red"))
       selection_hierarchy = cursor.get_selection().get_hierarchy()
 
       # provide a message saying how to close the menu
@@ -172,13 +172,13 @@ def _draw_submenu(cursor, level, top, left):
   try:
     # sets the background color
 
-    popup.win.bkgd(' ', curses.A_STANDOUT | uiTools.get_color("red"))
+    popup.win.bkgd(' ', curses.A_STANDOUT | ui_tools.get_color("red"))
 
     draw_top, selection_top = 0, 0
 
     for menu_item in submenu.get_children():
       if menu_item == selection:
-        draw_format = curses.A_BOLD | uiTools.get_color("white")
+        draw_format = curses.A_BOLD | ui_tools.get_color("white")
         selection_top = draw_top
       else:
         draw_format = curses.A_NORMAL
diff --git a/arm/popups.py b/arm/popups.py
index dcf1f93..be07336 100644
--- a/arm/popups.py
+++ b/arm/popups.py
@@ -7,7 +7,7 @@ import curses
 import arm.controller
 
 from arm import __version__, __release_date__
-from arm.util import panel, uiTools
+from arm.util import panel, ui_tools
 
 
 def init(height = -1, width = -1, top = 0, left = 0, below_static = True):
@@ -177,8 +177,8 @@ def show_help_popup():
   finally:
     finalize()
 
-  if not uiTools.is_selection_key(exit_key) and \
-    not uiTools.is_scroll_key(exit_key) and \
+  if not ui_tools.is_selection_key(exit_key) and \
+    not ui_tools.is_scroll_key(exit_key) and \
     not exit_key in (curses.KEY_LEFT, curses.KEY_RIGHT):
     return exit_key
   else:
@@ -280,7 +280,7 @@ def show_sort_dialog(title, options, old_selection, option_colors):
         cursor_location = max(0, cursor_location - 4)
       elif key == curses.KEY_DOWN:
         cursor_location = min(len(selection_options) - 1, cursor_location + 4)
-      elif uiTools.is_selection_key(key):
+      elif ui_tools.is_selection_key(key):
         selection = selection_options[cursor_location]
 
         if selection == "Cancel":
@@ -321,7 +321,7 @@ def _draw_sort_selection(popup, y, x, prefix, options, option_colors):
 
   for i in range(len(options)):
     sort_type = options[i]
-    sort_color = uiTools.get_color(option_colors.get(sort_type, "white"))
+    sort_color = ui_tools.get_color(option_colors.get(sort_type, "white"))
     popup.addstr(y, x, sort_type, sort_color | curses.A_BOLD)
     x += len(sort_type)
 
@@ -363,7 +363,7 @@ def show_menu(title, options, old_selection):
 
     curses.cbreak()   # wait indefinitely for key presses (no timeout)
 
-    while not uiTools.is_selection_key(key):
+    while not ui_tools.is_selection_key(key):
       popup.win.erase()
       popup.win.box()
       popup.addstr(0, 0, title, curses.A_STANDOUT)
diff --git a/arm/starter.py b/arm/starter.py
index 5b3bfc3..8bb1300 100644
--- a/arm/starter.py
+++ b/arm/starter.py
@@ -17,10 +17,10 @@ import arm
 import arm.arguments
 import arm.controller
 import arm.util.panel
-import arm.util.torConfig
-import arm.util.torTools
+import arm.util.tor_config
+import arm.util.tor_tools
 import arm.util.tracker
-import arm.util.uiTools
+import arm.util.ui_tools
 
 import stem
 import stem.util.conf
@@ -69,10 +69,10 @@ def main():
     controller = init_controller(args)
     authenticate(controller, CONFIG.get('tor.password', None), CONFIG.get('tor.chroot', ''))
 
-    # TODO: Our tor_controller() method will gradually replace the torTools
+    # TODO: Our tor_controller() method will gradually replace the tor_tools
     # module, but until that we need to initialize it too.
 
-    arm.util.torTools.get_conn().init(controller)
+    arm.util.tor_tools.get_conn().init(controller)
   except ValueError as exc:
     print exc
     exit(1)
@@ -232,7 +232,7 @@ def _load_tor_config_descriptions():
   Attempt to determine descriptions for tor's configuration options.
   """
 
-  arm.util.torConfig.load_configuration_descriptions(BASE_DIR)
+  arm.util.tor_config.load_configuration_descriptions(BASE_DIR)
 
 
 def _use_english_subcommands():
@@ -256,7 +256,7 @@ def _use_unicode():
 
   is_lang_unicode = "utf-" in os.getenv("LANG", "").lower()
 
-  if is_lang_unicode and arm.util.uiTools.is_wide_characters_supported():
+  if is_lang_unicode and arm.util.ui_tools.is_wide_characters_supported():
     locale.setlocale(locale.LC_ALL, '')
 
 
diff --git a/arm/torrcPanel.py b/arm/torrcPanel.py
deleted file mode 100644
index 51ff1a1..0000000
--- a/arm/torrcPanel.py
+++ /dev/null
@@ -1,353 +0,0 @@
-"""
-Panel displaying the torrc or armrc with the validation done against it.
-"""
-
-import math
-import curses
-import threading
-
-import arm.popups
-
-from arm.util import panel, torConfig, torTools, uiTools
-
-from stem.control import State
-from stem.util import conf, enum
-
-
-def conf_handler(key, value):
-  if key == "features.config.file.max_lines_per_entry":
-    return max(1, value)
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.config.file.showScrollbars": True,
-  "features.config.file.max_lines_per_entry": 8,
-}, conf_handler)
-
-# TODO: The armrc use case is incomplete. There should be reloading and
-# validation capabilities equivalent to the torrc's.
-Config = enum.Enum("TORRC", "ARMRC")  # configuration file types that can be displayed
-
-
-class TorrcPanel(panel.Panel):
-  """
-  Renders the current torrc or armrc with syntax highlighting in a scrollable
-  area.
-  """
-
-  def __init__(self, stdscr, config_type):
-    panel.Panel.__init__(self, stdscr, "torrc", 0)
-
-    self.vals_lock = threading.RLock()
-    self.config_type = config_type
-    self.scroll = 0
-    self.show_line_num = True     # shows left aligned line numbers
-    self.strip_comments = False   # drops comments and extra whitespace
-
-    # height of the content when last rendered (the cached value is invalid if
-    # _last_content_height_args is None or differs from the current dimensions)
-
-    self._last_content_height = 1
-    self._last_content_height_args = None
-
-    # listens for tor reload (sighup) events
-
-    conn = torTools.get_conn()
-    conn.add_status_listener(self.reset_listener)
-
-    if conn.is_alive():
-      self.reset_listener(None, State.INIT, None)
-
-  def reset_listener(self, controller, event_type, _):
-    """
-    Reloads and displays the torrc on tor reload (sighup) events.
-    """
-
-    if event_type == State.INIT:
-      # loads the torrc and provides warnings in case of validation errors
-
-      try:
-        loaded_torrc = torConfig.get_torrc()
-        loaded_torrc.load(True)
-        loaded_torrc.log_validation_issues()
-        self.redraw(True)
-      except:
-        pass
-    elif event_type == State.RESET:
-      try:
-        torConfig.get_torrc().load(True)
-        self.redraw(True)
-      except:
-        pass
-
-  def set_comments_visible(self, is_visible):
-    """
-    Sets if comments and blank lines are shown or stripped.
-
-    Arguments:
-      is_visible - displays comments and blank lines if true, strips them otherwise
-    """
-
-    self.strip_comments = not is_visible
-    self._last_content_height_args = None
-    self.redraw(True)
-
-  def set_line_number_visible(self, is_visible):
-    """
-    Sets if line numbers are shown or hidden.
-
-    Arguments:
-      is_visible - displays line numbers if true, hides otherwise
-    """
-
-    self.show_line_num = is_visible
-    self._last_content_height_args = None
-    self.redraw(True)
-
-  def reload_torrc(self):
-    """
-    Reloads the torrc, displaying an indicator of success or failure.
-    """
-
-    try:
-      torConfig.get_torrc().load()
-      self._last_content_height_args = None
-      self.redraw(True)
-      result_msg = "torrc reloaded"
-    except IOError:
-      result_msg = "failed to reload torrc"
-
-    self._last_content_height_args = None
-    self.redraw(True)
-    arm.popups.show_msg(result_msg, 1)
-
-  def handle_key(self, key):
-    self.vals_lock.acquire()
-    is_keystroke_consumed = True
-    if uiTools.is_scroll_key(key):
-      page_height = self.get_preferred_size()[0] - 1
-      new_scroll = uiTools.get_scroll_position(key, self.scroll, page_height, self._last_content_height)
-
-      if self.scroll != new_scroll:
-        self.scroll = new_scroll
-        self.redraw(True)
-    elif key == ord('n') or key == ord('N'):
-      self.set_line_number_visible(not self.show_line_num)
-    elif key == ord('s') or key == ord('S'):
-      self.set_comments_visible(self.strip_comments)
-    elif key == ord('r') or key == ord('R'):
-      self.reload_torrc()
-    else:
-      is_keystroke_consumed = False
-
-    self.vals_lock.release()
-    return is_keystroke_consumed
-
-  def set_visible(self, is_visible):
-    if not is_visible:
-      self._last_content_height_args = None  # redraws when next displayed
-
-    panel.Panel.set_visible(self, is_visible)
-
-  def get_help(self):
-    options = []
-    options.append(("up arrow", "scroll up a line", None))
-    options.append(("down arrow", "scroll down a line", None))
-    options.append(("page up", "scroll up a page", None))
-    options.append(("page down", "scroll down a page", None))
-    options.append(("s", "comment stripping", "on" if self.strip_comments else "off"))
-    options.append(("n", "line numbering", "on" if self.show_line_num else "off"))
-    options.append(("r", "reload torrc", None))
-    options.append(("x", "reset tor (issue sighup)", None))
-    return options
-
-  def draw(self, width, height):
-    self.vals_lock.acquire()
-
-    # If true, we assume that the cached value in self._last_content_height is
-    # still accurate, and stop drawing when there's nothing more to display.
-    # Otherwise the self._last_content_height is suspect, and we'll process all
-    # the content to check if it's right (and redraw again with the corrected
-    # height if not).
-
-    trust_last_content_height = self._last_content_height_args == (width, height)
-
-    # restricts scroll location to valid bounds
-
-    self.scroll = max(0, min(self.scroll, self._last_content_height - height + 1))
-
-    rendered_contents, corrections, conf_location = None, {}, None
-
-    if self.config_type == Config.TORRC:
-      loaded_torrc = torConfig.get_torrc()
-      loaded_torrc.get_lock().acquire()
-      conf_location = loaded_torrc.get_config_location()
-
-      if not loaded_torrc.is_loaded():
-        rendered_contents = ["### Unable to load the torrc ###"]
-      else:
-        rendered_contents = loaded_torrc.get_display_contents(self.strip_comments)
-
-        # constructs a mapping of line numbers to the issue on it
-
-        corrections = dict((line_number, (issue, msg)) for line_number, issue, msg in loaded_torrc.get_corrections())
-
-      loaded_torrc.get_lock().release()
-    else:
-      loaded_armrc = conf.get_config("arm")
-      conf_location = loaded_armrc._path
-      rendered_contents = list(loaded_armrc._raw_contents)
-
-    # offset to make room for the line numbers
-
-    line_number_offset = 0
-
-    if self.show_line_num:
-      if len(rendered_contents) == 0:
-        line_number_offset = 2
-      else:
-        line_number_offset = int(math.log10(len(rendered_contents))) + 2
-
-    # draws left-hand scroll bar if content's longer than the height
-
-    scroll_offset = 0
-
-    if CONFIG["features.config.file.showScrollbars"] and self._last_content_height > height - 1:
-      scroll_offset = 3
-      self.add_scroll_bar(self.scroll, self.scroll + height - 1, self._last_content_height, 1)
-
-    display_line = -self.scroll + 1  # line we're drawing on
-
-    # draws the top label
-
-    if self.is_title_visible():
-      source_label = "Tor" if self.config_type == Config.TORRC else "Arm"
-      location_label = " (%s)" % conf_location if conf_location else ""
-      self.addstr(0, 0, "%s Configuration File%s:" % (source_label, location_label), curses.A_STANDOUT)
-
-    is_multiline = False  # true if we're in the middle of a multiline torrc entry
-
-    for line_number in range(0, len(rendered_contents)):
-      line_text = rendered_contents[line_number]
-      line_text = line_text.rstrip()  # remove ending whitespace
-
-      # blank lines are hidden when stripping comments
-
-      if self.strip_comments and not line_text:
-        continue
-
-      # splits the line into its component (msg, format) tuples
-
-      line_comp = {
-        "option": ["", curses.A_BOLD | uiTools.get_color("green")],
-        "argument": ["", curses.A_BOLD | uiTools.get_color("cyan")],
-        "correction": ["", curses.A_BOLD | uiTools.get_color("cyan")],
-        "comment": ["", uiTools.get_color("white")],
-      }
-
-      # parses the comment
-
-      comment_index = line_text.find("#")
-
-      if comment_index != -1:
-        line_comp["comment"][0] = line_text[comment_index:]
-        line_text = line_text[:comment_index]
-
-      # splits the option and argument, preserving any whitespace around them
-
-      stripped_line = line_text.strip()
-      option_index = stripped_line.find(" ")
-
-      if is_multiline:
-        # part of a multiline entry started on a previous line so everything
-        # is part of the argument
-        line_comp["argument"][0] = line_text
-      elif option_index == -1:
-        # no argument provided
-        line_comp["option"][0] = line_text
-      else:
-        option_text = stripped_line[:option_index]
-        option_end = line_text.find(option_text) + len(option_text)
-        line_comp["option"][0] = line_text[:option_end]
-        line_comp["argument"][0] = line_text[option_end:]
-
-      # flags following lines as belonging to this multiline entry if it ends
-      # with a slash
-
-      if stripped_line:
-        is_multiline = stripped_line.endswith("\\")
-
-      # gets the correction
-
-      if line_number in corrections:
-        line_issue, line_issue_msg = corrections[line_number]
-
-        if line_issue in (torConfig.ValidationError.DUPLICATE, torConfig.ValidationError.IS_DEFAULT):
-          line_comp["option"][1] = curses.A_BOLD | uiTools.get_color("blue")
-          line_comp["argument"][1] = curses.A_BOLD | uiTools.get_color("blue")
-        elif line_issue == torConfig.ValidationError.MISMATCH:
-          line_comp["argument"][1] = curses.A_BOLD | uiTools.get_color("red")
-          line_comp["correction"][0] = " (%s)" % line_issue_msg
-        else:
-          # For some types of configs the correction field is simply used to
-          # provide extra data (for instance, the type for tor state fields).
-
-          line_comp["correction"][0] = " (%s)" % line_issue_msg
-          line_comp["correction"][1] = curses.A_BOLD | uiTools.get_color("magenta")
-
-      # draws the line number
-
-      if self.show_line_num and display_line < height and display_line >= 1:
-        line_number_str = ("%%%ii" % (line_number_offset - 1)) % (line_number + 1)
-        self.addstr(display_line, scroll_offset, line_number_str, curses.A_BOLD | uiTools.get_color("yellow"))
-
-      # draws the rest of the components with line wrap
-
-      cursor_location, line_offset = line_number_offset + scroll_offset, 0
-      max_lines_per_entry = CONFIG["features.config.file.max_lines_per_entry"]
-      display_queue = [line_comp[entry] for entry in ("option", "argument", "correction", "comment")]
-
-      while display_queue:
-        msg, format = display_queue.pop(0)
-
-        max_msg_size, include_break = width - cursor_location, False
-
-        if len(msg) >= max_msg_size:
-          # message is too long - break it up
-
-          if line_offset == max_lines_per_entry - 1:
-            msg = uiTools.crop_str(msg, max_msg_size)
-          else:
-            include_break = True
-            msg, remainder = uiTools.crop_str(msg, max_msg_size, 4, 4, uiTools.Ending.HYPHEN, True)
-            display_queue.insert(0, (remainder.strip(), format))
-
-        draw_line = display_line + line_offset
-
-        if msg and draw_line < height and draw_line >= 1:
-          self.addstr(draw_line, cursor_location, msg, format)
-
-        # If we're done, and have added content to this line, then start
-        # further content on the next line.
-
-        cursor_location += len(msg)
-        include_break |= not display_queue and cursor_location != line_number_offset + scroll_offset
-
-        if include_break:
-          line_offset += 1
-          cursor_location = line_number_offset + scroll_offset
-
-      display_line += max(line_offset, 1)
-
-      if trust_last_content_height and display_line >= height:
-        break
-
-    if not trust_last_content_height:
-      self._last_content_height_args = (width, height)
-      new_content_height = display_line + self.scroll - 1
-
-      if self._last_content_height != new_content_height:
-        self._last_content_height = new_content_height
-        self.redraw(True)
-
-    self.vals_lock.release()
diff --git a/arm/torrc_panel.py b/arm/torrc_panel.py
new file mode 100644
index 0000000..7957abc
--- /dev/null
+++ b/arm/torrc_panel.py
@@ -0,0 +1,353 @@
+"""
+Panel displaying the torrc or armrc with the validation done against it.
+"""
+
+import math
+import curses
+import threading
+
+import arm.popups
+
+from arm.util import panel, tor_config, tor_tools, ui_tools
+
+from stem.control import State
+from stem.util import conf, enum
+
+
+def conf_handler(key, value):
+  if key == "features.config.file.max_lines_per_entry":
+    return max(1, value)
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.config.file.showScrollbars": True,
+  "features.config.file.max_lines_per_entry": 8,
+}, conf_handler)
+
+# TODO: The armrc use case is incomplete. There should be reloading and
+# validation capabilities equivalent to the torrc's.
+Config = enum.Enum("TORRC", "ARMRC")  # configuration file types that can be displayed
+
+
+class TorrcPanel(panel.Panel):
+  """
+  Renders the current torrc or armrc with syntax highlighting in a scrollable
+  area.
+  """
+
+  def __init__(self, stdscr, config_type):
+    panel.Panel.__init__(self, stdscr, "torrc", 0)
+
+    self.vals_lock = threading.RLock()
+    self.config_type = config_type
+    self.scroll = 0
+    self.show_line_num = True     # shows left aligned line numbers
+    self.strip_comments = False   # drops comments and extra whitespace
+
+    # height of the content when last rendered (the cached value is invalid if
+    # _last_content_height_args is None or differs from the current dimensions)
+
+    self._last_content_height = 1
+    self._last_content_height_args = None
+
+    # listens for tor reload (sighup) events
+
+    conn = tor_tools.get_conn()
+    conn.add_status_listener(self.reset_listener)
+
+    if conn.is_alive():
+      self.reset_listener(None, State.INIT, None)
+
+  def reset_listener(self, controller, event_type, _):
+    """
+    Reloads and displays the torrc on tor reload (sighup) events.
+    """
+
+    if event_type == State.INIT:
+      # loads the torrc and provides warnings in case of validation errors
+
+      try:
+        loaded_torrc = tor_config.get_torrc()
+        loaded_torrc.load(True)
+        loaded_torrc.log_validation_issues()
+        self.redraw(True)
+      except:
+        pass
+    elif event_type == State.RESET:
+      try:
+        tor_config.get_torrc().load(True)
+        self.redraw(True)
+      except:
+        pass
+
+  def set_comments_visible(self, is_visible):
+    """
+    Sets if comments and blank lines are shown or stripped.
+
+    Arguments:
+      is_visible - displays comments and blank lines if true, strips them otherwise
+    """
+
+    self.strip_comments = not is_visible
+    self._last_content_height_args = None
+    self.redraw(True)
+
+  def set_line_number_visible(self, is_visible):
+    """
+    Sets if line numbers are shown or hidden.
+
+    Arguments:
+      is_visible - displays line numbers if true, hides otherwise
+    """
+
+    self.show_line_num = is_visible
+    self._last_content_height_args = None
+    self.redraw(True)
+
+  def reload_torrc(self):
+    """
+    Reloads the torrc, displaying an indicator of success or failure.
+    """
+
+    try:
+      tor_config.get_torrc().load()
+      self._last_content_height_args = None
+      self.redraw(True)
+      result_msg = "torrc reloaded"
+    except IOError:
+      result_msg = "failed to reload torrc"
+
+    self._last_content_height_args = None
+    self.redraw(True)
+    arm.popups.show_msg(result_msg, 1)
+
+  def handle_key(self, key):
+    self.vals_lock.acquire()
+    is_keystroke_consumed = True
+    if ui_tools.is_scroll_key(key):
+      page_height = self.get_preferred_size()[0] - 1
+      new_scroll = ui_tools.get_scroll_position(key, self.scroll, page_height, self._last_content_height)
+
+      if self.scroll != new_scroll:
+        self.scroll = new_scroll
+        self.redraw(True)
+    elif key == ord('n') or key == ord('N'):
+      self.set_line_number_visible(not self.show_line_num)
+    elif key == ord('s') or key == ord('S'):
+      self.set_comments_visible(self.strip_comments)
+    elif key == ord('r') or key == ord('R'):
+      self.reload_torrc()
+    else:
+      is_keystroke_consumed = False
+
+    self.vals_lock.release()
+    return is_keystroke_consumed
+
+  def set_visible(self, is_visible):
+    if not is_visible:
+      self._last_content_height_args = None  # redraws when next displayed
+
+    panel.Panel.set_visible(self, is_visible)
+
+  def get_help(self):
+    options = []
+    options.append(("up arrow", "scroll up a line", None))
+    options.append(("down arrow", "scroll down a line", None))
+    options.append(("page up", "scroll up a page", None))
+    options.append(("page down", "scroll down a page", None))
+    options.append(("s", "comment stripping", "on" if self.strip_comments else "off"))
+    options.append(("n", "line numbering", "on" if self.show_line_num else "off"))
+    options.append(("r", "reload torrc", None))
+    options.append(("x", "reset tor (issue sighup)", None))
+    return options
+
+  def draw(self, width, height):
+    self.vals_lock.acquire()
+
+    # If true, we assume that the cached value in self._last_content_height is
+    # still accurate, and stop drawing when there's nothing more to display.
+    # Otherwise the self._last_content_height is suspect, and we'll process all
+    # the content to check if it's right (and redraw again with the corrected
+    # height if not).
+
+    trust_last_content_height = self._last_content_height_args == (width, height)
+
+    # restricts scroll location to valid bounds
+
+    self.scroll = max(0, min(self.scroll, self._last_content_height - height + 1))
+
+    rendered_contents, corrections, conf_location = None, {}, None
+
+    if self.config_type == Config.TORRC:
+      loaded_torrc = tor_config.get_torrc()
+      loaded_torrc.get_lock().acquire()
+      conf_location = loaded_torrc.get_config_location()
+
+      if not loaded_torrc.is_loaded():
+        rendered_contents = ["### Unable to load the torrc ###"]
+      else:
+        rendered_contents = loaded_torrc.get_display_contents(self.strip_comments)
+
+        # constructs a mapping of line numbers to the issue on it
+
+        corrections = dict((line_number, (issue, msg)) for line_number, issue, msg in loaded_torrc.get_corrections())
+
+      loaded_torrc.get_lock().release()
+    else:
+      loaded_armrc = conf.get_config("arm")
+      conf_location = loaded_armrc._path
+      rendered_contents = list(loaded_armrc._raw_contents)
+
+    # offset to make room for the line numbers
+
+    line_number_offset = 0
+
+    if self.show_line_num:
+      if len(rendered_contents) == 0:
+        line_number_offset = 2
+      else:
+        line_number_offset = int(math.log10(len(rendered_contents))) + 2
+
+    # draws left-hand scroll bar if content's longer than the height
+
+    scroll_offset = 0
+
+    if CONFIG["features.config.file.showScrollbars"] and self._last_content_height > height - 1:
+      scroll_offset = 3
+      self.add_scroll_bar(self.scroll, self.scroll + height - 1, self._last_content_height, 1)
+
+    display_line = -self.scroll + 1  # line we're drawing on
+
+    # draws the top label
+
+    if self.is_title_visible():
+      source_label = "Tor" if self.config_type == Config.TORRC else "Arm"
+      location_label = " (%s)" % conf_location if conf_location else ""
+      self.addstr(0, 0, "%s Configuration File%s:" % (source_label, location_label), curses.A_STANDOUT)
+
+    is_multiline = False  # true if we're in the middle of a multiline torrc entry
+
+    for line_number in range(0, len(rendered_contents)):
+      line_text = rendered_contents[line_number]
+      line_text = line_text.rstrip()  # remove ending whitespace
+
+      # blank lines are hidden when stripping comments
+
+      if self.strip_comments and not line_text:
+        continue
+
+      # splits the line into its component (msg, format) tuples
+
+      line_comp = {
+        "option": ["", curses.A_BOLD | ui_tools.get_color("green")],
+        "argument": ["", curses.A_BOLD | ui_tools.get_color("cyan")],
+        "correction": ["", curses.A_BOLD | ui_tools.get_color("cyan")],
+        "comment": ["", ui_tools.get_color("white")],
+      }
+
+      # parses the comment
+
+      comment_index = line_text.find("#")
+
+      if comment_index != -1:
+        line_comp["comment"][0] = line_text[comment_index:]
+        line_text = line_text[:comment_index]
+
+      # splits the option and argument, preserving any whitespace around them
+
+      stripped_line = line_text.strip()
+      option_index = stripped_line.find(" ")
+
+      if is_multiline:
+        # part of a multiline entry started on a previous line so everything
+        # is part of the argument
+        line_comp["argument"][0] = line_text
+      elif option_index == -1:
+        # no argument provided
+        line_comp["option"][0] = line_text
+      else:
+        option_text = stripped_line[:option_index]
+        option_end = line_text.find(option_text) + len(option_text)
+        line_comp["option"][0] = line_text[:option_end]
+        line_comp["argument"][0] = line_text[option_end:]
+
+      # flags following lines as belonging to this multiline entry if it ends
+      # with a slash
+
+      if stripped_line:
+        is_multiline = stripped_line.endswith("\\")
+
+      # gets the correction
+
+      if line_number in corrections:
+        line_issue, line_issue_msg = corrections[line_number]
+
+        if line_issue in (tor_config.ValidationError.DUPLICATE, tor_config.ValidationError.IS_DEFAULT):
+          line_comp["option"][1] = curses.A_BOLD | ui_tools.get_color("blue")
+          line_comp["argument"][1] = curses.A_BOLD | ui_tools.get_color("blue")
+        elif line_issue == tor_config.ValidationError.MISMATCH:
+          line_comp["argument"][1] = curses.A_BOLD | ui_tools.get_color("red")
+          line_comp["correction"][0] = " (%s)" % line_issue_msg
+        else:
+          # For some types of configs the correction field is simply used to
+          # provide extra data (for instance, the type for tor state fields).
+
+          line_comp["correction"][0] = " (%s)" % line_issue_msg
+          line_comp["correction"][1] = curses.A_BOLD | ui_tools.get_color("magenta")
+
+      # draws the line number
+
+      if self.show_line_num and display_line < height and display_line >= 1:
+        line_number_str = ("%%%ii" % (line_number_offset - 1)) % (line_number + 1)
+        self.addstr(display_line, scroll_offset, line_number_str, curses.A_BOLD | ui_tools.get_color("yellow"))
+
+      # draws the rest of the components with line wrap
+
+      cursor_location, line_offset = line_number_offset + scroll_offset, 0
+      max_lines_per_entry = CONFIG["features.config.file.max_lines_per_entry"]
+      display_queue = [line_comp[entry] for entry in ("option", "argument", "correction", "comment")]
+
+      while display_queue:
+        msg, format = display_queue.pop(0)
+
+        max_msg_size, include_break = width - cursor_location, False
+
+        if len(msg) >= max_msg_size:
+          # message is too long - break it up
+
+          if line_offset == max_lines_per_entry - 1:
+            msg = ui_tools.crop_str(msg, max_msg_size)
+          else:
+            include_break = True
+            msg, remainder = ui_tools.crop_str(msg, max_msg_size, 4, 4, ui_tools.Ending.HYPHEN, True)
+            display_queue.insert(0, (remainder.strip(), format))
+
+        draw_line = display_line + line_offset
+
+        if msg and draw_line < height and draw_line >= 1:
+          self.addstr(draw_line, cursor_location, msg, format)
+
+        # If we're done, and have added content to this line, then start
+        # further content on the next line.
+
+        cursor_location += len(msg)
+        include_break |= not display_queue and cursor_location != line_number_offset + scroll_offset
+
+        if include_break:
+          line_offset += 1
+          cursor_location = line_number_offset + scroll_offset
+
+      display_line += max(line_offset, 1)
+
+      if trust_last_content_height and display_line >= height:
+        break
+
+    if not trust_last_content_height:
+      self._last_content_height_args = (width, height)
+      new_content_height = display_line + self.scroll - 1
+
+      if self._last_content_height != new_content_height:
+        self._last_content_height = new_content_height
+        self.redraw(True)
+
+    self.vals_lock.release()
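
As an aside on the drawing code above: TorrcPanel.draw() splits each torrc
line into comment, option, and argument components before coloring them. A
standalone sketch of that split, purely for illustration (the
split_torrc_line() helper is not part of this patch, and it skips the
multiline continuation handling):

  def split_torrc_line(line_text):
    # mirrors the comment/option/argument parsing in TorrcPanel.draw()
    comment, comment_index = "", line_text.find("#")

    if comment_index != -1:
      comment = line_text[comment_index:]
      line_text = line_text[:comment_index]

    stripped_line = line_text.strip()
    option_index = stripped_line.find(" ")

    if option_index == -1:
      return line_text, "", comment  # no argument provided
    else:
      option_text = stripped_line[:option_index]
      option_end = line_text.find(option_text) + len(option_text)
      return line_text[:option_end], line_text[option_end:], comment

  # split_torrc_line("ControlPort 9051  # management port")
  #   => ('ControlPort', ' 9051  ', '# management port')
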
diff --git a/arm/util/__init__.py b/arm/util/__init__.py
index e597b26..d94173b 100644
--- a/arm/util/__init__.py
+++ b/arm/util/__init__.py
@@ -4,7 +4,7 @@ application's status, making cross platform system calls, parsing tor data,
 and safely working with curses (hiding some of the gory details).
 """
 
-__all__ = ["connections", "panel", "sysTools", "textInput", "torConfig", "torTools", "tracker", "uiTools"]
+__all__ = ["connections", "panel", "sysTools", "text_input", "tor_config", "tor_tools", "tracker", "ui_tools"]
 
 import getpass
 import os
diff --git a/arm/util/panel.py b/arm/util/panel.py
index c34ac5e..c91a4ec 100644
--- a/arm/util/panel.py
+++ b/arm/util/panel.py
@@ -9,7 +9,7 @@ import curses.ascii
 import curses.textpad
 from threading import RLock
 
-from arm.util import textInput, uiTools
+from arm.util import text_input, ui_tools
 
 from stem.util import log
 
@@ -32,8 +32,8 @@ FORMAT_TAGS = {
   "<h>": (_no_op, curses.A_STANDOUT),
 }
 
-for color_label in uiTools.COLOR_LIST:
-  FORMAT_TAGS["<%s>" % color_label] = (uiTools.get_color, color_label)
+for color_label in ui_tools.COLOR_LIST:
+  FORMAT_TAGS["<%s>" % color_label] = (ui_tools.get_color, color_label)
 
 # prevents curses redraws if set
 HALT_ACTIVITY = False
@@ -520,7 +520,7 @@ class Panel():
     <b>text</b>               bold
     <u>text</u>               underline
     <h>text</h>               highlight
-    <[color]>text</[color]>   use color (see uiTools.get_color() for constants)
+    <[color]>text</[color]>   use color (see ui_tools.get_color() for constants)
 
     Tag nesting is supported and tag closing is strictly enforced (raising an
     exception for invalid formatting). Unrecognized tags are treated as normal
@@ -662,7 +662,7 @@ class Panel():
     textbox = curses.textpad.Textbox(input_subwindow)
 
     if not validator:
-      validator = textInput.BasicValidator()
+      validator = text_input.BasicValidator()
 
     textbox.win.attron(text_format)
     user_input = textbox.edit(lambda key: validator.validate(key, textbox)).strip()
diff --git a/arm/util/textInput.py b/arm/util/textInput.py
deleted file mode 100644
index 4faec1e..0000000
--- a/arm/util/textInput.py
+++ /dev/null
@@ -1,213 +0,0 @@
-"""
-Provides input validators that add various capabilities to text input. These
-can be chained together, with the first matching validator taking precedence.
-"""
-
-import os
-import curses
-
-PASS = -1
-
-
-class TextInputValidator:
-  """
-  Basic interface for validators. Implementations should override the handle_key
-  method.
-  """
-
-  def __init__(self, next_validator = None):
-    self.next_validator = next_validator
-
-  def validate(self, key, textbox):
-    """
-    Processes the given key input for the textbox. This may modify the
-    textbox's content, cursor position, etc depending on the functionality
-    of the validator. This returns the key that the textbox should interpret,
-    PASS if this validator doesn't want to take any action.
-
-    Arguments:
-      key     - key code input from the user
-      textbox - curses Textbox instance the input came from
-    """
-
-    result = self.handle_key(key, textbox)
-
-    if result != PASS:
-      return result
-    elif self.next_validator:
-      return self.next_validator.validate(key, textbox)
-    else:
-      return key
-
-  def handle_key(self, key, textbox):
-    """
-    Process the given keycode with this validator, returning the keycode for
-    the textbox to process, and PASS if this doesn't want to modify it.
-
-    Arguments:
-      key     - key code input from the user
-      textbox - curses Textbox instance the input came from
-    """
-
-    return PASS
-
-
-class BasicValidator(TextInputValidator):
-  """
-  Interceptor for keystrokes given to a textbox, doing the following:
-  - quits by setting the input to curses.ascii.BEL when escape is pressed
-  - stops the cursor at the end of the box's content when pressing the right
-    arrow
-  - home and end keys move to the start/end of the line
-  """
-
-  def handle_key(self, key, textbox):
-    y, x = textbox.win.getyx()
-
-    if curses.ascii.isprint(key) and x < textbox.maxx:
-      # Shifts the existing text forward so input is inserted rather than
-      # replacing what's already there. The curses.textpad accepts an insert
-      # mode flag but this has a couple of issues...
-      # - The flag is only available for Python 2.6+, before that the
-      #   constructor only accepted a subwindow argument as per:
-      #   https://trac.torproject.org/projects/tor/ticket/2354
-      # - The textpad doesn't shift text that has text attributes. This is
-      #   because keycodes read by textbox.win.inch() include formatting,
-      #   causing the curses.ascii.isprint() check it does to fail.
-
-      current_input = textbox.gather()
-      textbox.win.addstr(y, x + 1, current_input[x:textbox.maxx - 1])
-      textbox.win.move(y, x)  # reverts cursor movement during gather call
-    elif key == 27:
-      # curses.ascii.BEL is a character code that causes textpad to terminate
-
-      return curses.ascii.BEL
-    elif key == curses.KEY_HOME:
-      textbox.win.move(y, 0)
-      return None
-    elif key in (curses.KEY_END, curses.KEY_RIGHT):
-      msg_length = len(textbox.gather())
-      textbox.win.move(y, x)  # reverts cursor movement during gather call
-
-      if key == curses.KEY_END and msg_length > 0 and x < msg_length - 1:
-        # if we're in the content then move to the end
-
-        textbox.win.move(y, msg_length - 1)
-        return None
-      elif key == curses.KEY_RIGHT and x >= msg_length - 1:
-        # don't move the cursor if there's no content after it
-
-        return None
-    elif key == 410:
-      # if we're resizing the display during text entry then cancel it
-      # (otherwise the input field is filled with nonprintable characters)
-
-      return curses.ascii.BEL
-
-    return PASS
-
-
-class HistoryValidator(TextInputValidator):
-  """
-  This intercepts the up and down arrow keys to scroll through a backlog of
-  previous commands.
-  """
-
-  def __init__(self, command_backlog = [], next_validator = None):
-    TextInputValidator.__init__(self, next_validator)
-
-    # contents that can be scrolled back through, newest to oldest
-
-    self.command_backlog = command_backlog
-
-    # selected item from the backlog, -1 if we're not on a backlog item
-
-    self.selection_index = -1
-
-    # the field's input prior to selecting a backlog item
-
-    self.custom_input = ""
-
-  def handle_key(self, key, textbox):
-    if key in (curses.KEY_UP, curses.KEY_DOWN):
-      offset = 1 if key == curses.KEY_UP else -1
-      new_selection = self.selection_index + offset
-
-      # constrains the new selection to valid bounds
-
-      new_selection = max(-1, new_selection)
-      new_selection = min(len(self.command_backlog) - 1, new_selection)
-
-      # skips if this is a no-op
-
-      if self.selection_index == new_selection:
-        return None
-
-      # saves the previous input if we weren't on the backlog
-
-      if self.selection_index == -1:
-        self.custom_input = textbox.gather().strip()
-
-      if new_selection == -1:
-        new_input = self.custom_input
-      else:
-        new_input = self.command_backlog[new_selection]
-
-      y, _ = textbox.win.getyx()
-      _, max_x = textbox.win.getmaxyx()
-      textbox.win.clear()
-      textbox.win.addstr(y, 0, new_input[:max_x - 1])
-      textbox.win.move(y, min(len(new_input), max_x - 1))
-
-      self.selection_index = new_selection
-      return None
-
-    return PASS
-
-
-class TabCompleter(TextInputValidator):
-  """
-  Provides tab completion based on the current input, finishing if there's only
-  a single match. This expects a functor that accepts the current input and
-  provides matches.
-  """
-
-  def __init__(self, completer, next_validator = None):
-    TextInputValidator.__init__(self, next_validator)
-
-    # functor that accepts a string and gives a list of matches
-
-    self.completer = completer
-
-  def handle_key(self, key, textbox):
-    # Matches against the tab key. The ord('\t') is nine, though strangely none
-    # of the curses.KEY_*TAB constants match this...
-
-    if key == 9:
-      current_contents = textbox.gather().strip()
-      matches = self.completer(current_contents)
-      new_input = None
-
-      if len(matches) == 1:
-        # only a single match, fill it in
-        new_input = matches[0]
-      elif len(matches) > 1:
-        # looks for a common prefix we can complete
-        common_prefix = os.path.commonprefix(matches)  # weird that this comes from path...
-
-        if common_prefix != current_contents:
-          new_input = common_prefix
-
-        # TODO: somehow display matches... this is not gonna be fun
-
-      if new_input:
-        y, _ = textbox.win.getyx()
-        _, max_x = textbox.win.getmaxyx()
-        textbox.win.clear()
-        textbox.win.addstr(y, 0, new_input[:max_x - 1])
-        textbox.win.move(y, min(len(new_input), max_x - 1))
-
-      return None
-
-    return PASS
diff --git a/arm/util/text_input.py b/arm/util/text_input.py
new file mode 100644
index 0000000..4faec1e
--- /dev/null
+++ b/arm/util/text_input.py
@@ -0,0 +1,213 @@
+"""
+Provides input validators that add various capabilities to text input. These
+can be chained together, with the first matching validator taking precedence.
+"""
+
+import os
+import curses
+
+PASS = -1
+
+
+class TextInputValidator:
+  """
+  Basic interface for validators. Implementations should override the handle_key
+  method.
+  """
+
+  def __init__(self, next_validator = None):
+    self.next_validator = next_validator
+
+  def validate(self, key, textbox):
+    """
+    Processes the given key input for the textbox. This may modify the
+    textbox's content, cursor position, etc depending on the functionality
+    of the validator. This returns the key that the textbox should interpret,
+    PASS if this validator doesn't want to take any action.
+
+    Arguments:
+      key     - key code input from the user
+      textbox - curses Textbox instance the input came from
+    """
+
+    result = self.handle_key(key, textbox)
+
+    if result != PASS:
+      return result
+    elif self.next_validator:
+      return self.next_validator.validate(key, textbox)
+    else:
+      return key
+
+  def handle_key(self, key, textbox):
+    """
+    Process the given keycode with this validator, returning the keycode for
+    the textbox to process, and PASS if this doesn't want to modify it.
+
+    Arguments:
+      key     - key code input from the user
+      textbox - curses Textbox instance the input came from
+    """
+
+    return PASS
+
+
+class BasicValidator(TextInputValidator):
+  """
+  Interceptor for keystrokes given to a textbox, doing the following:
+  - quits by setting the input to curses.ascii.BEL when escape is pressed
+  - stops the cursor at the end of the box's content when pressing the right
+    arrow
+  - home and end keys move to the start/end of the line
+  """
+
+  def handle_key(self, key, textbox):
+    y, x = textbox.win.getyx()
+
+    if curses.ascii.isprint(key) and x < textbox.maxx:
+      # Shifts the existing text forward so input is inserted rather than
+      # replacing what's already there. The curses.textpad accepts an insert
+      # mode flag but this has a couple of issues...
+      # - The flag is only available for Python 2.6+, before that the
+      #   constructor only accepted a subwindow argument as per:
+      #   https://trac.torproject.org/projects/tor/ticket/2354
+      # - The textpad doesn't shift text that has text attributes. This is
+      #   because keycodes read by textbox.win.inch() include formatting,
+      #   causing the curses.ascii.isprint() check it does to fail.
+
+      current_input = textbox.gather()
+      textbox.win.addstr(y, x + 1, current_input[x:textbox.maxx - 1])
+      textbox.win.move(y, x)  # reverts cursor movement during gather call
+    elif key == 27:
+      # curses.ascii.BEL is a character code that causes textpad to terminate
+
+      return curses.ascii.BEL
+    elif key == curses.KEY_HOME:
+      textbox.win.move(y, 0)
+      return None
+    elif key in (curses.KEY_END, curses.KEY_RIGHT):
+      msg_length = len(textbox.gather())
+      textbox.win.move(y, x)  # reverts cursor movement during gather call
+
+      if key == curses.KEY_END and msg_length > 0 and x < msg_length - 1:
+        # if we're in the content then move to the end
+
+        textbox.win.move(y, msg_length - 1)
+        return None
+      elif key == curses.KEY_RIGHT and x >= msg_length - 1:
+        # don't move the cursor if there's no content after it
+
+        return None
+    elif key == 410:
+      # if we're resizing the display during text entry then cancel it
+      # (otherwise the input field is filled with nonprintable characters)
+
+      return curses.ascii.BEL
+
+    return PASS
+
+
+class HistoryValidator(TextInputValidator):
+  """
+  This intercepts the up and down arrow keys to scroll through a backlog of
+  previous commands.
+  """
+
+  def __init__(self, command_backlog = [], next_validator = None):
+    TextInputValidator.__init__(self, next_validator)
+
+    # contents that can be scrolled back through, newest to oldest
+
+    self.command_backlog = command_backlog
+
+    # selected item from the backlog, -1 if we're not on a backlog item
+
+    self.selection_index = -1
+
+    # the field's input prior to selecting a backlog item
+
+    self.custom_input = ""
+
+  def handle_key(self, key, textbox):
+    if key in (curses.KEY_UP, curses.KEY_DOWN):
+      offset = 1 if key == curses.KEY_UP else -1
+      new_selection = self.selection_index + offset
+
+      # constrains the new selection to valid bounds
+
+      new_selection = max(-1, new_selection)
+      new_selection = min(len(self.command_backlog) - 1, new_selection)
+
+      # skips if this is a no-op
+
+      if self.selection_index == new_selection:
+        return None
+
+      # saves the previous input if we weren't on the backlog
+
+      if self.selection_index == -1:
+        self.custom_input = textbox.gather().strip()
+
+      if new_selection == -1:
+        new_input = self.custom_input
+      else:
+        new_input = self.command_backlog[new_selection]
+
+      y, _ = textbox.win.getyx()
+      _, max_x = textbox.win.getmaxyx()
+      textbox.win.clear()
+      textbox.win.addstr(y, 0, new_input[:max_x - 1])
+      textbox.win.move(y, min(len(new_input), max_x - 1))
+
+      self.selection_index = new_selection
+      return None
+
+    return PASS
+
+
+class TabCompleter(TextInputValidator):
+  """
+  Provides tab completion based on the current input, finishing if there's only
+  a single match. This expects a functor that accepts the current input and
+  provides matches.
+  """
+
+  def __init__(self, completer, next_validator = None):
+    TextInputValidator.__init__(self, next_validator)
+
+    # functor that accepts a string and gives a list of matches
+
+    self.completer = completer
+
+  def handle_key(self, key, textbox):
+    # Matches against the tab key. The ord('\t') is nine, though strangely none
+    # of the curses.KEY_*TAB constants match this...
+
+    if key == 9:
+      current_contents = textbox.gather().strip()
+      matches = self.completer(current_contents)
+      new_input = None
+
+      if len(matches) == 1:
+        # only a single match, fill it in
+        new_input = matches[0]
+      elif len(matches) > 1:
+        # looks for a common prefix we can complete
+        common_prefix = os.path.commonprefix(matches)  # weird that this comes from path...
+
+        if common_prefix != current_contents:
+          new_input = common_prefix
+
+        # TODO: somehow display matches... this is not gonna be fun
+
+      if new_input:
+        y, _ = textbox.win.getyx()
+        _, max_x = textbox.win.getmaxyx()
+        textbox.win.clear()
+        textbox.win.addstr(y, 0, new_input[:max_x - 1])
+        textbox.win.move(y, min(len(new_input), max_x - 1))
+
+      return None
+
+    return PASS
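
For context, these validators are meant to be chained via their
next_validator argument; panel.py above falls back to a bare
text_input.BasicValidator() when no validator is supplied. A rough sketch of
wiring all three together (the completer function and its command list are
hypothetical, just to illustrate the shape of the API):

  from arm.util import text_input

  def completer(current_input):
    # hypothetical matcher: returns the known commands starting with the input
    commands = ["GETINFO version", "GETCONF ControlPort", "SETEVENTS BW"]
    return [cmd for cmd in commands if cmd.startswith(current_input)]

  # tab completion is tried first, then history scrolling, then the basic
  # editing behavior (escape to quit, home/end movement, and so on)
  validator = text_input.TabCompleter(
    completer,
    next_validator = text_input.HistoryValidator(
      ["GETINFO version"],  # backlog of prior commands, newest to oldest
      next_validator = text_input.BasicValidator(),
    ),
  )

  # a curses Textbox then consumes it the same way panel.py does:
  #   textbox.edit(lambda key: validator.validate(key, textbox))
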
diff --git a/arm/util/torConfig.py b/arm/util/torConfig.py
deleted file mode 100644
index 5668995..0000000
--- a/arm/util/torConfig.py
+++ /dev/null
@@ -1,1301 +0,0 @@
-"""
-Helper functions for working with tor's configuration file.
-"""
-
-import os
-import time
-import socket
-import threading
-
-import stem.version
-
-from arm.util import torTools, uiTools
-
-from stem.util import conf, enum, log, str_tools, system
-
-# filename used for cached tor config descriptions
-
-CONFIG_DESC_FILENAME = "torConfigDesc.txt"
-
-# messages related to loading the tor configuration descriptions
-
-DESC_LOAD_SUCCESS_MSG = "Loaded configuration descriptions from '%s' (runtime: %0.3f)"
-DESC_LOAD_FAILED_MSG = "Unable to load configuration descriptions (%s)"
-DESC_INTERNAL_LOAD_SUCCESS_MSG = "Falling back to descriptions for Tor %s"
-DESC_INTERNAL_LOAD_FAILED_MSG = "Unable to load fallback descriptions. Categories and help for Tor's configuration options won't be available. (%s)"
-DESC_READ_MAN_SUCCESS_MSG = "Read descriptions for tor's configuration options from its man page (runtime %0.3f)"
-DESC_READ_MAN_FAILED_MSG = "Unable to get the descriptions of Tor's configuration options from its man page (%s)"
-DESC_SAVE_SUCCESS_MSG = "Saved configuration descriptions to '%s' (runtime: %0.3f)"
-DESC_SAVE_FAILED_MSG = "Unable to save configuration descriptions (%s)"
-
-
-def conf_handler(key, value):
-  if key == "torrc.important":
-    # stores lowercase entries to drop case sensitivity
-    return [entry.lower() for entry in value]
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.torrc.validate": True,
-  "torrc.important": [],
-  "torrc.alias": {},
-  "torrc.units.size.b": [],
-  "torrc.units.size.kb": [],
-  "torrc.units.size.mb": [],
-  "torrc.units.size.gb": [],
-  "torrc.units.size.tb": [],
-  "torrc.units.time.sec": [],
-  "torrc.units.time.min": [],
-  "torrc.units.time.hour": [],
-  "torrc.units.time.day": [],
-  "torrc.units.time.week": [],
-  "startup.data_directory": "~/.arm",
-  "features.config.descriptions.enabled": True,
-  "features.config.descriptions.persist": True,
-  "tor.chroot": '',
-}, conf_handler)
-
-
-def general_conf_handler(config, key):
-  value = config.get(key)
-
-  if key.startswith("torrc.summary."):
-    # we'll look for summary keys with a lowercase config name
-    CONFIG[key.lower()] = value
-  elif key.startswith("torrc.units.") and value:
-    # all the torrc.units.* values are comma separated lists
-    return [entry.strip() for entry in value[0].split(",")]
-
-
-conf.get_config("arm").add_listener(general_conf_handler, backfill = True)
-
-# enums and values for numeric torrc entries
-
-ValueType = enum.Enum("UNRECOGNIZED", "SIZE", "TIME")
-SIZE_MULT = {"b": 1, "kb": 1024, "mb": 1048576, "gb": 1073741824, "tb": 1099511627776}
-TIME_MULT = {"sec": 1, "min": 60, "hour": 3600, "day": 86400, "week": 604800}
-
-# enums for issues found during torrc validation:
-# DUPLICATE  - entry is ignored due to being a duplicate
-# MISMATCH   - the value doesn't match tor's current state
-# MISSING    - value differs from its default but is missing from the torrc
-# IS_DEFAULT - the configuration option matches tor's default
-
-ValidationError = enum.Enum("DUPLICATE", "MISMATCH", "MISSING", "IS_DEFAULT")
-
-# descriptions of tor's configuration options fetched from its man page
-
-CONFIG_DESCRIPTIONS_LOCK = threading.RLock()
-CONFIG_DESCRIPTIONS = {}
-
-# categories for tor configuration options
-
-Category = enum.Enum("GENERAL", "CLIENT", "RELAY", "DIRECTORY", "AUTHORITY", "HIDDEN_SERVICE", "TESTING", "UNKNOWN")
-
-TORRC = None  # singleton torrc instance
-MAN_OPT_INDENT = 7  # indentation before options in the man page
-MAN_EX_INDENT = 15  # indentation used for man page examples
-PERSIST_ENTRY_DIVIDER = "-" * 80 + "\n"  # splits config entries when saving to a file
-MULTILINE_PARAM = None  # cached multiline parameters (lazily loaded)
-
-# torrc options that bind to ports
-
-PORT_OPT = ("SocksPort", "ORPort", "DirPort", "ControlPort", "TransPort")
-
-
-class ManPageEntry:
-  """
-  Information provided about a tor configuration option in its man page entry.
-  """
-
-  def __init__(self, option, index, category, arg_usage, description):
-    self.option = option
-    self.index = index
-    self.category = category
-    self.arg_usage = arg_usage
-    self.description = description
-
-
-def get_torrc():
-  """
-  Singleton constructor for a Torrc instance. Be aware that this starts out
-  unloaded, needing the torrc contents to be loaded before it's functional.
-  """
-
-  global TORRC
-
-  if TORRC is None:
-    TORRC = Torrc()
-
-  return TORRC
-
-
-def load_option_descriptions(load_path = None, check_version = True):
-  """
-  Fetches and parses descriptions for tor's configuration options from its man
-  page. This can be a somewhat lengthy call, and raises an IOError if issues
-  occur. When successfully loading from a file this returns the version of the
-  contents loaded.
-
-  If available, this can load the configuration descriptions from a file where
-  they were previously persisted to cut down on the load time (latency for this
-  is around 200ms).
-
-  Arguments:
-    load_path     - if set, this attempts to fetch the configuration
-                   descriptions from the given path instead of the man page
-    check_version - discards the results if true and tor's version doesn't
-                   match the cached descriptors, otherwise accepts anyway
-  """
-
-  CONFIG_DESCRIPTIONS_LOCK.acquire()
-  CONFIG_DESCRIPTIONS.clear()
-
-  raised_exc = None
-  loaded_version = ""
-
-  try:
-    if load_path:
-      # Input file is expected to be of the form:
-      # <option>
-      # <arg description>
-      # <description, possibly multiple lines>
-      # <PERSIST_ENTRY_DIVIDER>
-      input_file = open(load_path, "r")
-      input_file_contents = input_file.readlines()
-      input_file.close()
-
-      try:
-        version_line = input_file_contents.pop(0).rstrip()
-
-        if version_line.startswith("Tor Version "):
-          file_version = version_line[12:]
-          loaded_version = file_version
-          tor_version = torTools.get_conn().get_info("version", "")
-
-          if check_version and file_version != tor_version:
-            msg = "wrong version, tor is %s but the file's from %s" % (tor_version, file_version)
-            raise IOError(msg)
-        else:
-          raise IOError("unable to parse version")
-
-        while input_file_contents:
-          # gets category enum, failing if it doesn't exist
-          category = input_file_contents.pop(0).rstrip()
-
-          if not category in Category:
-            base_msg = "invalid category in input file: '%s'"
-            raise IOError(base_msg % category)
-
-          # gets the position in the man page
-          index_arg, index_str = -1, input_file_contents.pop(0).rstrip()
-
-          if index_str.startswith("index: "):
-            index_str = index_str[7:]
-
-            if index_str.isdigit():
-              index_arg = int(index_str)
-            else:
-              raise IOError("non-numeric index value: %s" % index_str)
-          else:
-            raise IOError("malformed index argument: %s" % index_str)
-
-          option = input_file_contents.pop(0).rstrip()
-          argument = input_file_contents.pop(0).rstrip()
-
-          description, loaded_line = "", input_file_contents.pop(0)
-
-          while loaded_line != PERSIST_ENTRY_DIVIDER:
-            description += loaded_line
-
-            if input_file_contents:
-              loaded_line = input_file_contents.pop(0)
-            else:
-              break
-
-          CONFIG_DESCRIPTIONS[option.lower()] = ManPageEntry(option, index_arg, category, argument, description.rstrip())
-      except IndexError:
-        CONFIG_DESCRIPTIONS.clear()
-        raise IOError("input file format is invalid")
-    else:
-      man_call_results = system.call("man tor", None)
-
-      if not man_call_results:
-        raise IOError("man page not found")
-
-      # Fetches all options available with this tor instance. This isn't
-      # vital, and the valid_options are left empty if the call fails.
-
-      conn, valid_options = torTools.get_conn(), []
-      config_option_query = conn.get_info("config/names", None)
-
-      if config_option_query:
-        for line in config_option_query.strip().split("\n"):
-          valid_options.append(line[:line.find(" ")].lower())
-
-      option_count, last_option, last_arg = 0, None, None
-      last_category, last_description = Category.GENERAL, ""
-
-      for line in man_call_results:
-        line = uiTools.get_printable(line)
-        stripped_line = line.strip()
-
-        # we have content, but an indent less than an option (ignore line)
-        #if stripped_line and not line.startswith(" " * MAN_OPT_INDENT): continue
-
-        # line starts with an indent equivalent to a new config option
-
-        is_opt_indent = line.startswith(" " * MAN_OPT_INDENT) and line[MAN_OPT_INDENT] != " "
-
-        is_category_line = not line.startswith(" ") and "OPTIONS" in line
-
-        # if this is a category header or a new option, add an entry using the
-        # buffered results
-
-        if is_opt_indent or is_category_line:
-          # Filters the line based on if the option is recognized by tor or
-          # not. This isn't necessary for arm, so if unable to make the check
-          # then we skip filtering (no loss, the map will just have some extra
-          # noise).
-
-          stripped_description = last_description.strip()
-
-          if last_option and (not valid_options or last_option.lower() in valid_options):
-            CONFIG_DESCRIPTIONS[last_option.lower()] = ManPageEntry(last_option, option_count, last_category, last_arg, stripped_description)
-            option_count += 1
-
-          last_description = ""
-
-          # parses the option and argument
-
-          line = line.strip()
-          div_index = line.find(" ")
-
-          if div_index != -1:
-            last_option, last_arg = line[:div_index], line[div_index + 1:]
-
-          # if this is a category header then switch it
-
-          if is_category_line:
-            if line.startswith("OPTIONS"):
-              last_category = Category.GENERAL
-            elif line.startswith("CLIENT"):
-              last_category = Category.CLIENT
-            elif line.startswith("SERVER"):
-              last_category = Category.RELAY
-            elif line.startswith("DIRECTORY SERVER"):
-              last_category = Category.DIRECTORY
-            elif line.startswith("DIRECTORY AUTHORITY SERVER"):
-              last_category = Category.AUTHORITY
-            elif line.startswith("HIDDEN SERVICE"):
-              last_category = Category.HIDDEN_SERVICE
-            elif line.startswith("TESTING NETWORK"):
-              last_category = Category.TESTING
-            else:
-              log.notice("Unrecognized category in the man page: %s" % line.strip())
-        else:
-          # Appends the text to the running description. Empty lines and lines
-          # starting with a specific indentation are used for formatting, for
-          # instance the ExitPolicy and TestingTorNetwork entries.
-
-          if last_description and last_description[-1] != "\n":
-            last_description += " "
-
-          if not stripped_line:
-            last_description += "\n\n"
-          elif line.startswith(" " * MAN_EX_INDENT):
-            last_description += "    %s\n" % stripped_line
-          else:
-            last_description += stripped_line
-  except IOError as exc:
-    raised_exc = exc
-
-  CONFIG_DESCRIPTIONS_LOCK.release()
-
-  if raised_exc:
-    raise raised_exc
-  else:
-    return loaded_version
-
-
-def save_option_descriptions(path):
-  """
-  Preserves the current configuration descriptors to the given path. This
-  raises an IOError or OSError if unable to do so.
-
-  Arguments:
-    path - location to persist configuration descriptors
-  """
-
-  # make dir if the path doesn't already exist
-
-  base_dir = os.path.dirname(path)
-
-  if not os.path.exists(base_dir):
-    os.makedirs(base_dir)
-
-  output_file = open(path, "w")
-
-  CONFIG_DESCRIPTIONS_LOCK.acquire()
-  sorted_options = CONFIG_DESCRIPTIONS.keys()
-  sorted_options.sort()
-
-  tor_version = torTools.get_conn().get_info("version", "")
-  output_file.write("Tor Version %s\n" % tor_version)
-
-  for i in range(len(sorted_options)):
-    man_entry = get_config_description(sorted_options[i])
-    output_file.write("%s\nindex: %i\n%s\n%s\n%s\n" % (man_entry.category, man_entry.index, man_entry.option, man_entry.arg_usage, man_entry.description))
-
-    if i != len(sorted_options) - 1:
-      output_file.write(PERSIST_ENTRY_DIVIDER)
-
-  output_file.close()
-  CONFIG_DESCRIPTIONS_LOCK.release()
-
-
-def get_config_summary(option):
-  """
-  Provides a short summary description of the configuration option. If none is
-  known then this provides None.
-
-  Arguments:
-    option - tor config option
-  """
-
-  return CONFIG.get("torrc.summary.%s" % option.lower())
-
-
-def is_important(option):
-  """
-  Provides True if the option has the 'important' flag in the configuration,
-  False otherwise.
-
-  Arguments:
-    option - tor config option
-  """
-
-  return option.lower() in CONFIG["torrc.important"]
-
-
-def get_config_description(option):
-  """
-  Provides ManPageEntry instances populated with information fetched from the
-  tor man page. This provides None if no such option has been loaded. If the
-  man page is in the process of being loaded then this call blocks until it
-  finishes.
-
-  Arguments:
-    option - tor config option
-  """
-
-  CONFIG_DESCRIPTIONS_LOCK.acquire()
-
-  if option.lower() in CONFIG_DESCRIPTIONS:
-    return_val = CONFIG_DESCRIPTIONS[option.lower()]
-  else:
-    return_val = None
-
-  CONFIG_DESCRIPTIONS_LOCK.release()
-  return return_val
-
-
-def get_config_options():
-  """
-  Provides the configuration options from the loaded man page. This is an empty
-  list if no man page has been loaded.
-  """
-
-  CONFIG_DESCRIPTIONS_LOCK.acquire()
-
-  return_val = [CONFIG_DESCRIPTIONS[opt].option for opt in CONFIG_DESCRIPTIONS]
-
-  CONFIG_DESCRIPTIONS_LOCK.release()
-  return return_val
-
-
-def get_config_location():
-  """
-  Provides the location of the torrc, raising an IOError with the reason if the
-  path can't be determined.
-  """
-
-  conn = torTools.get_conn()
-  config_location = conn.get_info("config-file", None)
-  tor_pid, tor_prefix = conn.controller.get_pid(None), CONFIG['tor.chroot']
-
-  if not config_location:
-    raise IOError("unable to query the torrc location")
-
-  try:
-    tor_cwd = system.get_cwd(tor_pid)
-    return tor_prefix + system.expand_path(config_location, tor_cwd)
-  except IOError as exc:
-    raise IOError("querying tor's pwd failed because %s" % exc)
-
-
-def get_multiline_parameters():
-  """
-  Provides parameters that can be defined multiple times in the torrc without
-  overwriting the value.
-  """
-
-  # fetches config options with the LINELIST (aka 'LineList'), LINELIST_S (aka
-  # 'Dependent'), and LINELIST_V (aka 'Virtual') types
-
-  global MULTILINE_PARAM
-
-  if MULTILINE_PARAM is None:
-    conn, multiline_entries = torTools.get_conn(), []
-
-    config_option_query = conn.get_info("config/names", None)
-
-    if config_option_query:
-      for line in config_option_query.strip().split("\n"):
-        conf_option, conf_type = line.strip().split(" ", 1)
-
-        if conf_type in ("LineList", "Dependant", "Virtual"):
-          multiline_entries.append(conf_option)
-    else:
-      # unable to query tor connection, so not caching results
-      return ()
-
-    MULTILINE_PARAM = multiline_entries
-
-  return tuple(MULTILINE_PARAM)
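-
-# Illustrative sketch (not part of the original module): if the
-# "GETINFO config/names" response included the following hypothetical lines...
-#
-#   ExitPolicy LineList
-#   Log LineList
-#   RunAsDaemon Boolean
-#
-# ...then get_multiline_parameters() would provide ('ExitPolicy', 'Log'),
-# since only the LineList, Dependant, and Virtual types accumulate values
-# rather than overwriting them.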
-
-
-def get_custom_options(include_value = False):
-  """
-  Provides the torrc parameters that differ from their defaults.
-
-  Arguments:
-    include_value - provides the current value with results if true, otherwise
-                   this just contains the options
-  """
-
-  config_text = torTools.get_conn().get_info("config-text", "").strip()
-  config_lines = config_text.split("\n")
-
-  # removes any duplicates
-
-  config_lines = list(set(config_lines))
-
-  # The "GETINFO config-text" query only provides options that differ
-  # from Tor's defaults, with the exception of its Log and Nickname entries
-  # which are reported even when undefined. An undefined Log is returned as
-  # "Log notice stdout" as per:
-  # https://trac.torproject.org/projects/tor/ticket/2362
-  #
-  # If this is from the deb then it will be "Log notice file /var/log/tor/log"
-  # due to special patching applied to it, as per:
-  # https://trac.torproject.org/projects/tor/ticket/4602
-
-  try:
-    config_lines.remove("Log notice stdout")
-  except ValueError:
-    pass
-
-  try:
-    config_lines.remove("Log notice file /var/log/tor/log")
-  except ValueError:
-    pass
-
-  try:
-    config_lines.remove("Nickname %s" % socket.gethostname())
-  except ValueError:
-    pass
-
-  if include_value:
-    return config_lines
-  else:
-    return [line[:line.find(" ")] for line in config_lines]
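-
-# Illustrative sketch (not part of the original module): if "GETINFO
-# config-text" returned...
-#
-#   ControlPort 9051
-#   Log notice stdout
-#
-# ...then get_custom_options() would provide ['ControlPort'] and
-# get_custom_options(True) would provide ['ControlPort 9051'], the default
-# "Log notice stdout" entry having been filtered out.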
-
-
-def save_conf(destination = None, contents = None):
-  """
-  Saves the configuration to the given path. If this is equivalent to
-  issuing a SAVECONF (the contents and destination match what tor's using)
-  then that's done. Otherwise, this writes the contents directly. This raises
-  an IOError if unsuccessful.
-
-  Arguments:
-    destination - path to be saved to, the current config location if None
-    contents    - configuration to be saved, the current config if None
-  """
-
-  if destination:
-    destination = os.path.abspath(destination)
-
-  # fills default config values, and sets is_saveconf to false if they differ
-  # from the arguments
-
-  is_saveconf, start_time = True, time.time()
-
-  current_config = get_custom_options(True)
-
-  if not contents:
-    contents = current_config
-  else:
-    is_saveconf &= contents == current_config
-
-  # The "GETINFO config-text" option was introduced in Tor version 0.2.2.7. If
-  # we're writing custom contents then this is fine, but if we're trying to
-  # save the current configuration then we need to fail if it's unavailable.
-  # Otherwise we'd write a blank torrc as per...
-  # https://trac.torproject.org/projects/tor/ticket/3614
-
-  if contents == ['']:
-    # double check that "GETINFO config-text" is unavailable rather than just
-    # giving an empty result
-
-    if torTools.get_conn().get_info("config-text", None) is None:
-      raise IOError("determining the torrc requires Tor version 0.2.2.7")
-
-  current_location = None
-
-  try:
-    current_location = get_config_location()
-
-    if not destination:
-      destination = current_location
-    else:
-      is_saveconf &= destination == current_location
-  except IOError:
-    pass
-
-  if not destination:
-    raise IOError("unable to determine the torrc's path")
-
-  log_msg = "Saved config by %%s to %s (runtime: %%0.4f)" % destination
-
-  # attempts SAVECONF if we're updating our torrc with the current state
-
-  if is_saveconf:
-    try:
-      torTools.get_conn().save_conf()
-
-      try:
-        get_torrc().load()
-      except IOError:
-        pass
-
-      log.debug(log_msg % ("SAVECONF", time.time() - start_time))
-      return  # if successful then we're done
-    except:
-      pass
-
-  # if the SAVECONF fails or this is a custom save then write contents directly
-
-  try:
-    # make dir if the path doesn't already exist
-
-    base_dir = os.path.dirname(destination)
-
-    if not os.path.exists(base_dir):
-      os.makedirs(base_dir)
-
-    # saves the configuration to the file
-
-    config_file = open(destination, "w")
-    config_file.write("\n".join(contents))
-    config_file.close()
-  except (IOError, OSError) as exc:
-    raise IOError(exc)
-
-  # reloads the cached torrc if overwriting it
-
-  if destination == current_location:
-    try:
-      get_torrc().load()
-    except IOError:
-      pass
-
-  log.debug(log_msg % ("directly writing", time.time() - start_time))
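-
-# Illustrative usage (hypothetical paths and values, not part of the original
-# module):
-#
-#   save_conf()                                 # SAVECONF, tor rewrites its own torrc
-#   save_conf("/tmp/torrc.backup")              # writes the current config to a new path
-#   save_conf(contents = ["ControlPort 9051"])  # writes custom contents to the torrc location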
-
-
-def validate(contents = None):
-  """
-  Performs validation on the given torrc contents, providing back a listing of
-  (line number, issue, msg) tuples for issues found. If the issue occurs on a
-  multiline torrc entry then the line number is for the last line of the entry.
-
-  Arguments:
-    contents - torrc contents
-  """
-
-  conn = torTools.get_conn()
-  custom_options = get_custom_options()
-  issues_found, seen_options = [], []
-
-  # Strips comments and collapses multi-line entries, for more
-  # information see:
-  # https://trac.torproject.org/projects/tor/ticket/1929
-
-  stripped_contents, multiline_buffer = [], ""
-
-  for line in _strip_comments(contents):
-    if not line:
-      stripped_contents.append("")
-    else:
-      line = multiline_buffer + line
-      multiline_buffer = ""
-
-      if line.endswith("\\"):
-        multiline_buffer = line[:-1]
-        stripped_contents.append("")
-      else:
-        stripped_contents.append(line.strip())
-
-  for line_number in range(len(stripped_contents) - 1, -1, -1):
-    line_text = stripped_contents[line_number]
-
-    if not line_text:
-      continue
-
-    line_comp = line_text.split(None, 1)
-
-    if len(line_comp) == 2:
-      option, value = line_comp
-    else:
-      option, value = line_text, ""
-
-    # Tor is case insensitive when parsing its torrc. This poses a bit of an
-    # issue for us because we want all of our checks to be case insensitive
-    # too but also want messages to match the normal camel-case conventions.
-    #
-    # Using custom_options to account for this. It contains the options as tor
-    # reports them (camel case); if it isn't a matching set then the following
-    # default value check will fail. Hence using that list to correct the case.
-    #
-    # TODO: when refactoring for stem make this less confusing...
-
-    for custom_opt in custom_options:
-      if custom_opt.lower() == option.lower():
-        option = custom_opt
-        break
-
-    # if an aliased option then use its real name
-
-    if option in CONFIG["torrc.alias"]:
-      option = CONFIG["torrc.alias"][option]
-
-    # most parameters are overwritten if defined multiple times
-
-    if option in seen_options and not option in get_multiline_parameters():
-      issues_found.append((line_number, ValidationError.DUPLICATE, option))
-      continue
-    else:
-      seen_options.append(option)
-
-    # checks if the value isn't necessary due to matching the defaults
-
-    if not option in custom_options:
-      issues_found.append((line_number, ValidationError.IS_DEFAULT, option))
-
-    # replace aliases with their recognized representation
-
-    if option in CONFIG["torrc.alias"]:
-      option = CONFIG["torrc.alias"][option]
-
-    # tor appears to replace tabs with a space, for instance:
-    # "accept\t*:563" is read back as "accept *:563"
-
-    value = value.replace("\t", " ")
-
-    # parse value if it's a size or time, expanding the units
-
-    value, value_type = _parse_conf_value(value)
-
-    # issues GETCONF to get the values tor's currently configured to use
-
-    tor_values = conn.get_option(option, [], True)
-
-    # multiline entries can be comma separated values (for both tor and conf)
-
-    value_list = [value]
-
-    if option in get_multiline_parameters():
-      value_list = [val.strip() for val in value.split(",")]
-
-      fetched_values, tor_values = tor_values, []
-      for fetched_value in fetched_values:
-        for fetched_entry in fetched_value.split(","):
-          fetched_entry = fetched_entry.strip()
-
-          if not fetched_entry in tor_values:
-            tor_values.append(fetched_entry)
-
-    for val in value_list:
-      # checks if both the argument and tor's value are empty
-
-      is_blank_match = not val and not tor_values
-
-      if not is_blank_match and not val in tor_values:
-        # converts corrections to reader friendly size values
-
-        display_values = tor_values
-
-        if value_type == ValueType.SIZE:
-          display_values = [str_tools.get_size_label(int(val)) for val in tor_values]
-        elif value_type == ValueType.TIME:
-          display_values = [str_tools.get_time_label(int(val)) for val in tor_values]
-
-        issues_found.append((line_number, ValidationError.MISMATCH, ", ".join(display_values)))
-
-  # checks if any custom options are missing from the torrc
-
-  for option in custom_options:
-    # In new versions the 'DirReqStatistics' option is true by default and
-    # disabled on startup if geoip lookups are unavailable. If this option is
-    # missing then that's most likely the reason.
-    #
-    # https://trac.torproject.org/projects/tor/ticket/4237
-
-    if option == "DirReqStatistics":
-      continue
-
-    if not option in seen_options:
-      issues_found.append((None, ValidationError.MISSING, option))
-
-  return issues_found
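-
-# Illustrative sketch (not part of the original module): validating a torrc
-# whose only line is "SocksPort 9050" while that matches tor's default would
-# include (0, ValidationError.IS_DEFAULT, 'SocksPort') among the results
-# (line numbers are zero-indexed).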
-
-
-def _parse_conf_value(conf_arg):
-  """
-  Converts size or time values to their lowest units (bytes or seconds) which
-  is what GETCONF calls provide. The result is a tuple of the value and unit
-  type.
-
-  Arguments:
-    conf_arg - torrc argument
-  """
-
-  if conf_arg.count(" ") == 1:
-    val, unit = conf_arg.lower().split(" ", 1)
-
-    if not val.isdigit():
-      return conf_arg, ValueType.UNRECOGNIZED
-
-    mult, mult_type = _get_unit_type(unit)
-
-    if mult is not None:
-      return str(int(val) * mult), mult_type
-
-  return conf_arg, ValueType.UNRECOGNIZED
-
-
-def _get_unit_type(unit):
-  """
-  Provides the type and multiplier for an argument's unit. The multiplier is
-  None if the unit isn't recognized.
-
-  Arguments:
-    unit - string representation of a unit
-  """
-
-  for label in SIZE_MULT:
-    if unit in CONFIG["torrc.units.size." + label]:
-      return SIZE_MULT[label], ValueType.SIZE
-
-  for label in TIME_MULT:
-    if unit in CONFIG["torrc.units.time." + label]:
-      return TIME_MULT[label], ValueType.TIME
-
-  return None, ValueType.UNRECOGNIZED
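-
-# Illustrative sketch (not part of the original module), assuming the armrc
-# maps 'kb' and 'minutes' to the usual 1024 byte and 60 second multipliers:
-#
-#   _parse_conf_value("512 kb")     => ("524288", ValueType.SIZE)
-#   _parse_conf_value("5 minutes")  => ("300", ValueType.TIME)
-#   _parse_conf_value("nonsense")   => ("nonsense", ValueType.UNRECOGNIZED)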
-
-
-def _strip_comments(contents):
-  """
-  Removes comments and extra whitespace from the given torrc contents.
-
-  Arguments:
-    contents - torrc contents
-  """
-
-  stripped_contents = []
-
-  for line in contents:
-    if line and "#" in line:
-      line = line[:line.find("#")]
-
-    stripped_contents.append(line.strip())
-
-  return stripped_contents
-
-
-class Torrc():
-  """
-  Wrapper for the torrc. All getters provide None if the contents are unloaded.
-  """
-
-  def __init__(self):
-    self.contents = None
-    self.config_location = None
-    self.vals_lock = threading.RLock()
-
-    # cached results for the current contents
-    self.displayable_contents = None
-    self.stripped_contents = None
-    self.corrections = None
-
-    # flag to indicate if we've given a load failure warning before
-    self.is_load_fail_warned = False
-
-  def load(self, log_failure = False):
-    """
-    Loads or reloads the torrc contents, raising an IOError if there's a
-    problem.
-
-    Arguments:
-      log_failure - if the torrc fails to load and we've never provided a
-                   warning for this before then logs a warning
-    """
-
-    self.vals_lock.acquire()
-
-    # clears contents and caches
-    self.contents, self.config_location = None, None
-    self.displayable_contents = None
-    self.stripped_contents = None
-    self.corrections = None
-
-    try:
-      self.config_location = get_config_location()
-      config_file = open(self.config_location, "r")
-      self.contents = config_file.readlines()
-      config_file.close()
-    except IOError as exc:
-      if log_failure and not self.is_load_fail_warned:
-        log.warn("Unable to load torrc (%s)" % exc.strerror)
-        self.is_load_fail_warned = True
-
-      self.vals_lock.release()
-      raise exc
-
-    self.vals_lock.release()
-
-  def is_loaded(self):
-    """
-    Provides true if there's loaded contents, false otherwise.
-    """
-
-    return self.contents is not None
-
-  def get_config_location(self):
-    """
-    Provides the location of the loaded configuration contents. This may be
-    available even if the torrc failed to load.
-    """
-
-    return self.config_location
-
-  def get_contents(self):
-    """
-    Provides the contents of the configuration file.
-    """
-
-    self.vals_lock.acquire()
-    return_val = list(self.contents) if self.contents else None
-    self.vals_lock.release()
-    return return_val
-
-  def get_display_contents(self, strip = False):
-    """
-    Provides the contents of the configuration file, formatted in a rendering
-    friendly fashion:
-    - Tabs print as three spaces. Keeping them as tabs is problematic for
-      layouts since it's counted as a single character, but occupies several
-      cells.
-    - Strips control and unprintable characters.
-
-    Arguments:
-      strip - removes comments and extra whitespace if true
-    """
-
-    self.vals_lock.acquire()
-
-    if not self.is_loaded():
-      return_val = None
-    else:
-      if self.displayable_contents is None:
-        # restricts contents to displayable characters
-        self.displayable_contents = []
-
-        for line_number in range(len(self.contents)):
-          line_text = self.contents[line_number]
-          line_text = line_text.replace("\t", "   ")
-          line_text = uiTools.get_printable(line_text)
-          self.displayable_contents.append(line_text)
-
-      if strip:
-        if self.stripped_contents is None:
-          self.stripped_contents = _strip_comments(self.displayable_contents)
-
-        return_val = list(self.stripped_contents)
-      else:
-        return_val = list(self.displayable_contents)
-
-    self.vals_lock.release()
-    return return_val
-
-  def get_corrections(self):
-    """
-    Performs validation on the loaded contents and provides back the
-    corrections. If validation is disabled then this won't provide any
-    results.
-    """
-
-    self.vals_lock.acquire()
-
-    if not self.is_loaded():
-      return_val = None
-    else:
-      tor_version = torTools.get_conn().get_version()
-      skip_validation = not CONFIG["features.torrc.validate"]
-      skip_validation |= (tor_version is None or not tor_version >= stem.version.Requirement.GETINFO_CONFIG_TEXT)
-
-      if skip_validation:
-        log.info("Skipping torrc validation (requires tor 0.2.2.7-alpha)")
-        return_val = {}
-      else:
-        if self.corrections is None:
-          self.corrections = validate(self.contents)
-
-        return_val = list(self.corrections)
-
-    self.vals_lock.release()
-    return return_val
-
-  def get_lock(self):
-    """
-    Provides the lock governing concurrent access to the contents.
-    """
-
-    return self.vals_lock
-
-  def log_validation_issues(self):
-    """
-    Performs validation on the loaded contents, and logs warnings for issues
-    that are found.
-    """
-
-    corrections = self.get_corrections()
-
-    if corrections:
-      duplicate_options, default_options, mismatch_lines, missing_options = [], [], [], []
-
-      for line_number, issue, msg in corrections:
-        if issue == ValidationError.DUPLICATE:
-          duplicate_options.append("%s (line %i)" % (msg, line_number + 1))
-        elif issue == ValidationError.IS_DEFAULT:
-          default_options.append("%s (line %i)" % (msg, line_number + 1))
-        elif issue == ValidationError.MISMATCH:
-          mismatch_lines.append(line_number + 1)
-        elif issue == ValidationError.MISSING:
-          missing_options.append(msg)
-
-      if duplicate_options or default_options:
-        msg = "Unneeded torrc entries found. They've been highlighted in blue on the torrc page."
-
-        if duplicate_options:
-          if len(duplicate_options) > 1:
-            msg += "\n- entries ignored due to having duplicates: "
-          else:
-            msg += "\n- entry ignored due to having a duplicate: "
-
-          duplicate_options.sort()
-          msg += ", ".join(duplicate_options)
-
-        if default_options:
-          if len(default_options) > 1:
-            msg += "\n- entries match their default values: "
-          else:
-            msg += "\n- entry matches its default value: "
-
-          default_options.sort()
-          msg += ", ".join(default_options)
-
-        log.notice(msg)
-
-      if mismatch_lines or missing_options:
-        msg = "The torrc differs from what tor's using. You can issue a sighup to reload the torrc values by pressing x."
-
-        if mismatch_lines:
-          if len(mismatch_lines) > 1:
-            msg += "\n- torrc values differ on lines: "
-          else:
-            msg += "\n- torrc value differs on line: "
-
-          mismatch_lines.sort()
-          msg += ", ".join([str(val + 1) for val in mismatch_lines])
-
-        if missing_options:
-          if len(missing_options) > 1:
-            msg += "\n- configuration values are missing from the torrc: "
-          else:
-            msg += "\n- configuration value is missing from the torrc: "
-
-          missing_options.sort()
-          msg += ", ".join(missing_options)
-
-        log.warn(msg)
-
-
-def _test_config_descriptions():
-  """
-  Tester for the load_option_descriptions function, fetching the man page
-  contents and dumping its parsed results.
-  """
-
-  load_option_descriptions()
-  sorted_options = CONFIG_DESCRIPTIONS.keys()
-  sorted_options.sort()
-
-  for i in range(len(sorted_options)):
-    option = sorted_options[i]
-    man_entry = get_config_description(option)
-    opt_label = "OPTION: \"%s\"" % option
-    arg_label = "ARGUMENT: \"%s\"" % man_entry.arg_usage
-
-    print "     %-45s %s" % (opt_label, arg_label)
-    print "\"%s\"" % man_entry.description
-
-    if i != len(sorted_options) - 1:
-      print "-" * 80
-
-
-def is_root_needed(torrc_path):
-  """
-  Returns True if the given torrc needs root permissions to be run, False
-  otherwise. This raises an IOError if the torrc can't be read.
-
-  Arguments:
-    torrc_path - torrc to be checked
-  """
-
-  try:
-    torrc_file = open(torrc_path, "r")
-    torrc_lines = torrc_file.readlines()
-    torrc_file.close()
-
-    for line in torrc_lines:
-      line = line.strip()
-
-      is_port_opt = False
-
-      for opt in PORT_OPT:
-        if line.startswith(opt):
-          is_port_opt = True
-          break
-
-      if is_port_opt and " " in line:
-        arg = line.split(" ")[1]
-
-        if arg.isdigit() and int(arg) <= 1024 and int(arg) != 0:
-          return True
-
-    return False
-  except Exception as exc:
-    raise IOError(exc)
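-
-# Illustrative sketch (not part of the original module), assuming ORPort is
-# among the PORT_OPT entries being checked: a torrc containing "ORPort 443"
-# makes is_root_needed() return True (privileged port), while one that only
-# has "ORPort 9001" does not.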
-
-
-def render_torrc(template, options, comment_indent = 30):
-  """
-  Uses the given template to generate a nicely formatted torrc with the given
-  options. The templating language it recognizes is a simple one, consisting
-  of the following constructs:
-    [IF <option>]         # if <option> maps to true or a non-empty string
-    [IF NOT <option>]     # logical inverse
-    [IF <opt1> | <opt2>]  # logical or of the options
-    [ELSE]          # if the prior conditional evaluated to false
-    [END IF]        # ends the control block
-
-    [<option>]      # inputs the option value, omitting the line if it maps
-                    # to a boolean or empty string
-    [NEWLINE]       # empty line, otherwise templating white space is ignored
-
-  Arguments:
-    template      - torrc template lines used to generate the results
-    options       - mapping of keywords to their given values, with values
-                    being booleans or strings (possibly multi-line)
-    comment_indent - minimum column that comments align on
-  """
-
-  results = []
-  template_iter = iter(template)
-  comment_line_format = "%%-%is%%s" % comment_indent
-
-  try:
-    while True:
-      line = template_iter.next().strip()
-
-      if line.startswith("[IF ") and line.endswith("]"):
-        # checks if any of the conditional options are true or a non-empty string
-
-        evaluates_true = False
-
-        for cond in line[4:-1].split("|"):
-          is_inverse = False
-
-          if cond.startswith("NOT "):
-            is_inverse = True
-            cond = cond[4:]
-
-          if is_inverse != bool(options.get(cond.strip())):
-            evaluates_true = True
-            break
-
-        if evaluates_true:
-          continue
-        else:
-          # skips lines until we come to an else or the end of the block
-          depth = 0
-
-          while depth != -1:
-            line = template_iter.next().strip()
-
-            if line.startswith("[IF ") and line.endswith("]"):
-              depth += 1
-            elif line == "[END IF]":
-              depth -= 1
-            elif depth == 0 and line == "[ELSE]":
-              depth -= 1
-      elif line == "[ELSE]":
-        # an else block we aren't using - skip to the end of it
-        depth = 0
-
-        while depth != -1:
-          line = template_iter.next().strip()
-
-          if line.startswith("[IF "):
-            depth += 1
-          elif line == "[END IF]":
-            depth -= 1
-      elif line == "[NEWLINE]":
-        # explicit newline
-        results.append("")
-      elif line.startswith("#"):
-        # comment only
-        results.append(line)
-      elif line.startswith("[") and line.endswith("]"):
-        # completely dynamic entry
-
-        opt_value = options.get(line[1:-1])
-
-        if opt_value:
-          results.append(opt_value)
-      else:
-        # torrc option line
-
-        option, arg, comment = "", "", ""
-        parsed_line = line
-
-        if "#" in parsed_line:
-          parsed_line, comment = parsed_line.split("#", 1)
-          parsed_line = parsed_line.strip()
-          comment = "# %s" % comment.strip()
-
-        # parses the argument from the option
-
-        if " " in parsed_line.strip():
-          option, arg = parsed_line.split(" ", 1)
-          option = option.strip()
-        else:
-          log.info("torrc template option lacks an argument: '%s'" % line)
-          continue
-
-        # inputs dynamic arguments
-
-        if arg.startswith("[") and arg.endswith("]"):
-          arg = options.get(arg[1:-1])
-
-        # skips argument if it's false or an empty string
-
-        if not arg:
-          continue
-
-        torrc_entry = "%s %s" % (option, arg)
-
-        if comment:
-          results.append(comment_line_format % (torrc_entry + " ", comment))
-        else:
-          results.append(torrc_entry)
-  except StopIteration:
-    pass
-
-  return "\n".join(results)
-
-
-def load_configuration_descriptions(path_prefix):
-  """
-  Attempts to load descriptions for tor's configuration options, fetching them
-  from the man page and persisting them to a file to speed future startups.
-  """
-
-  # It is important that this is loaded before entering the curses context,
-  # otherwise the man call pegs the cpu for around a minute (I'm not sure
-  # why... curses must mess with the terminal in a way that matters to man).
-
-  if CONFIG["features.config.descriptions.enabled"]:
-    is_config_descriptions_loaded = False
-
-    # determines the path where cached descriptions should be persisted (left
-    # undefined if caching is disabled)
-
-    descriptor_path = None
-
-    if CONFIG["features.config.descriptions.persist"]:
-      data_dir = CONFIG["startup.data_directory"]
-
-      if not data_dir.endswith("/"):
-        data_dir += "/"
-
-      descriptor_path = os.path.expanduser(data_dir + "cache/") + CONFIG_DESC_FILENAME
-
-    # attempts to load configuration descriptions cached in the data directory
-
-    if descriptor_path:
-      try:
-        load_start_time = time.time()
-        load_option_descriptions(descriptor_path)
-        is_config_descriptions_loaded = True
-
-        log.info(DESC_LOAD_SUCCESS_MSG % (descriptor_path, time.time() - load_start_time))
-      except IOError as exc:
-        log.info(DESC_LOAD_FAILED_MSG % exc.strerror)
-
-    # fetches configuration options from the man page
-
-    if not is_config_descriptions_loaded:
-      try:
-        load_start_time = time.time()
-        load_option_descriptions()
-        is_config_descriptions_loaded = True
-
-        log.info(DESC_READ_MAN_SUCCESS_MSG % (time.time() - load_start_time))
-      except IOError as exc:
-        log.notice(DESC_READ_MAN_FAILED_MSG % exc.strerror)
-
-      # persists configuration descriptions
-
-      if is_config_descriptions_loaded and descriptor_path:
-        try:
-          load_start_time = time.time()
-          save_option_descriptions(descriptor_path)
-          log.info(DESC_SAVE_SUCCESS_MSG % (descriptor_path, time.time() - load_start_time))
-        except IOError as exc:
-          log.notice(DESC_SAVE_FAILED_MSG % exc.strerror)
-        except OSError as exc:
-          log.notice(DESC_SAVE_FAILED_MSG % exc)
-
-    # finally fall back to the cached descriptors provided with arm (this is
-    # often the case for tbb and manual builds)
-
-    if not is_config_descriptions_loaded:
-      try:
-        load_start_time = time.time()
-        loaded_version = load_option_descriptions("%sresources/%s" % (path_prefix, CONFIG_DESC_FILENAME), False)
-        is_config_descriptions_loaded = True
-        log.notice(DESC_INTERNAL_LOAD_SUCCESS_MSG % loaded_version)
-      except IOError as exc:
-        log.error(DESC_INTERNAL_LOAD_FAILED_MSG % exc.strerror)
diff --git a/arm/util/torTools.py b/arm/util/torTools.py
deleted file mode 100644
index a7945fd..0000000
--- a/arm/util/torTools.py
+++ /dev/null
@@ -1,1039 +0,0 @@
-"""
-Helper for working with an active tor process. This both provides a wrapper for
-accessing stem and notifications of state changes to subscribers.
-"""
-
-import math
-import os
-import threading
-import time
-
-import stem
-import stem.control
-
-from stem.util import log, proc, system
-
-CONTROLLER = None  # singleton Controller instance
-
-UNDEFINED = "<Undefined_ >"
-
-
-def get_conn():
-  """
-  Singleton constructor for a Controller. Be aware that this starts as being
-  uninitialized, needing a stem Controller before it's fully functional.
-  """
-
-  global CONTROLLER
-
-  if CONTROLLER is None:
-    CONTROLLER = Controller()
-
-  return CONTROLLER
-
-
-class Controller:
-  """
-  Stem wrapper providing convenience functions (mostly from the days of using
-  TorCtl), listener functionality for tor's state, and the capability for
-  controller connections to be restarted if closed.
-  """
-
-  def __init__(self):
-    self.controller = None
-    self.conn_lock = threading.RLock()
-    self._fingerprint_mappings = None     # mappings of ip -> [(port, fingerprint), ...]
-    self._fingerprint_lookup_cache = {}   # lookup cache with (ip, port) -> fingerprint mappings
-    self._nickname_lookup_cache = {}      # lookup cache with fingerprint -> nickname mappings
-    self._address_lookup_cache = {}       # lookup cache with fingerprint -> (ip address, or port) mappings
-    self._consensus_lookup_cache = {}     # lookup cache with network status entries
-    self._descriptor_lookup_cache = {}    # lookup cache with relay descriptors
-    self._last_newnym = 0                 # time we last sent a NEWNYM signal
-
-  def init(self, controller):
-    """
-    Uses the given stem instance for future operations, notifying listeners
-    about the change.
-
-    Arguments:
-      controller - stem based Controller instance
-    """
-
-    # TODO: We should reuse our controller instance so event listeners will be
-    # re-attached. This is a point of regression until we do... :(
-
-    if controller.is_alive() and controller != self.controller:
-      self.conn_lock.acquire()
-
-      if self.controller:
-        self.close()  # shut down current connection
-
-      self.controller = controller
-      log.info("Stem connected to tor version %s" % self.controller.get_version())
-
-      self.controller.add_event_listener(self.ns_event, stem.control.EventType.NS)
-      self.controller.add_event_listener(self.new_consensus_event, stem.control.EventType.NEWCONSENSUS)
-      self.controller.add_event_listener(self.new_desc_event, stem.control.EventType.NEWDESC)
-
-      # reset caches for ip -> fingerprint lookups
-
-      self._fingerprint_mappings = None
-      self._fingerprint_lookup_cache = {}
-      self._nickname_lookup_cache = {}
-      self._address_lookup_cache = {}
-      self._consensus_lookup_cache = {}
-      self._descriptor_lookup_cache = {}
-
-      # time that we sent our last newnym signal
-
-      self._last_newnym = 0
-
-      self.conn_lock.release()
-
-  def close(self):
-    """
-    Closes the current stem instance and notifies listeners.
-    """
-
-    self.conn_lock.acquire()
-
-    if self.controller:
-      self.controller.close()
-
-    self.conn_lock.release()
-
-  def get_controller(self):
-    return self.controller
-
-  def is_alive(self):
-    """
-    Returns True if this has been initialized with a working stem instance,
-    False otherwise.
-    """
-
-    self.conn_lock.acquire()
-
-    result = False
-
-    if self.controller:
-      if self.controller.is_alive():
-        result = True
-      else:
-        self.close()
-
-    self.conn_lock.release()
-    return result
-
-  def get_info(self, param, default = UNDEFINED):
-    """
-    Queries the control port for the given GETINFO option, providing the
-    default if the response is undefined or fails for any reason (error
-    response, control port closed, uninitialized connection, etc).
-
-    Arguments:
-      param   - GETINFO option to be queried
-      default - result if the query fails
-    """
-
-    self.conn_lock.acquire()
-
-    try:
-      if not self.is_alive():
-        if default != UNDEFINED:
-          return default
-        else:
-          raise stem.SocketClosed()
-
-      if default != UNDEFINED:
-        return self.controller.get_info(param, default)
-      else:
-        return self.controller.get_info(param)
-    except stem.SocketClosed as exc:
-      self.close()
-      raise exc
-    finally:
-      self.conn_lock.release()
-
-  def get_option(self, param, default = UNDEFINED, multiple = False):
-    """
-    Queries the control port for the given configuration option, providing the
-    default if the response is undefined or fails for any reason. If multiple
-    values exist then this arbitrarily returns the first unless the multiple
-    flag is set.
-
-    Arguments:
-      param     - configuration option to be queried
-      default   - result if the query fails
-      multiple  - provides a list with all returned values if true, otherwise
-                  this just provides the first result
-    """
-
-    self.conn_lock.acquire()
-
-    try:
-      if not self.is_alive():
-        if default != UNDEFINED:
-          return default
-        else:
-          raise stem.SocketClosed()
-
-      if default != UNDEFINED:
-        return self.controller.get_conf(param, default, multiple)
-      else:
-        return self.controller.get_conf(param, multiple = multiple)
-    except stem.SocketClosed as exc:
-      self.close()
-      raise exc
-    finally:
-      self.conn_lock.release()
-
-  def set_option(self, param, value = None):
-    """
-    Issues a SETCONF to set the given option/value pair. An exception is raised
-    if it fails to be set. If no value is provided then this sets the option to
-    0 or NULL.
-
-    Arguments:
-      param - configuration option to be set
-      value - value to set the parameter to (this can be either a string or a
-              list of strings)
-    """
-
-    self.conn_lock.acquire()
-
-    try:
-      if not self.is_alive():
-        raise stem.SocketClosed()
-
-      self.controller.set_conf(param, value)
-    except stem.SocketClosed as exc:
-      self.close()
-      raise exc
-    finally:
-      self.conn_lock.release()
-
-  def save_conf(self):
-    """
-    Calls tor's SAVECONF method.
-    """
-
-    self.conn_lock.acquire()
-
-    if self.is_alive():
-      self.controller.save_conf()
-
-    self.conn_lock.release()
-
-  def send_newnym(self):
-    """
-    Sends a newnym request to Tor. These are rate limited so if it occurs
-    more than once within a ten second window then the second is delayed.
-    """
-
-    self.conn_lock.acquire()
-
-    if self.is_alive():
-      self._last_newnym = time.time()
-      self.controller.signal(stem.Signal.NEWNYM)
-
-    self.conn_lock.release()
-
-  def is_newnym_available(self):
-    """
-    True if Tor will immediately respect a newnym request, false otherwise.
-    """
-
-    if self.is_alive():
-      return self.get_newnym_wait() == 0
-    else:
-      return False
-
-  def get_newnym_wait(self):
-    """
-    Provides the number of seconds until a newnym signal would be respected.
-    """
-
-    # newnym signals can occur at the rate of one every ten seconds
-    # TODO: this can't take other controllers into account :(
-
-    return max(0, math.ceil(self._last_newnym + 10 - time.time()))
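-
-  # Illustrative sketch (not part of the original class): if a NEWNYM was sent
-  # three seconds ago then get_newnym_wait() provides 7.0 and
-  # is_newnym_available() provides False; once the ten second window has
-  # passed they provide 0 and True respectively.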
-
-  def get_circuits(self, default = []):
-    """
-    This provides a list with tuples of the form:
-    (circuit_id, status, purpose, (fingerprint1, fingerprint2...))
-
-    Arguments:
-      default - value provided back if unable to query the circuit-status
-    """
-
-    # TODO: We're losing caching around this. We should check to see the call
-    # volume of this and probably add it to stem.
-
-    results = []
-
-    for entry in self.controller.get_circuits():
-      fingerprints = []
-
-      for fp, nickname in entry.path:
-        if not fp:
-          consensus_entry = self.controller.get_network_status(nickname, None)
-
-          if consensus_entry:
-            fp = consensus_entry.fingerprint
-
-          # It shouldn't be possible for this lookup to fail, but we
-          # need to fill something (callers won't expect our own client
-          # paths to have unknown relays). If this turns out to be wrong
-          # then log a warning.
-
-          if not fp:
-            log.warn("Unable to determine the fingerprint for a relay in our own circuit: %s" % nickname)
-            fp = "0" * 40
-
-        fingerprints.append(fp)
-
-      results.append((int(entry.id), entry.status, entry.purpose, fingerprints))
-
-    if results:
-      return results
-    else:
-      return default
-
-  def get_hidden_service_ports(self, default = []):
-    """
-    Provides the target ports hidden services are configured to use.
-
-    Arguments:
-      default - value provided back if unable to query the hidden service ports
-    """
-
-    result = []
-    hs_options = self.controller.get_conf_map("HiddenServiceOptions", {})
-
-    for entry in hs_options.get("HiddenServicePort", []):
-      # HiddenServicePort entries are of the form...
-      #
-      #   VIRTPORT [TARGET]
-      #
-      # ... with the TARGET being an address, port, or address:port. If the
-      # target port isn't defined then uses the VIRTPORT.
-
-      hs_port = None
-
-      if ' ' in entry:
-        virtport, target = entry.split(' ', 1)
-
-        if ':' in target:
-          hs_port = target.split(':', 1)[1]  # target is an address:port
-        elif target.isdigit():
-          hs_port = target  # target is a port
-        else:
-          hs_port = virtport  # target is an address
-      else:
-        hs_port = entry  # just has the virtual port
-
-      if hs_port.isdigit():
-        result.append(hs_port)
-
-    if result:
-      return result
-    else:
-      return default
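-
-  # Illustrative sketch (not part of the original class): HiddenServicePort
-  # entries of "80 127.0.0.1:8080", "22 5222", and "443" would contribute the
-  # target ports '8080', '5222', and '443' respectively.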
-
-  def get_my_bandwidth_rate(self, default = None):
-    """
-    Provides the effective relaying bandwidth rate of this relay. Currently
-    this doesn't account for SETCONF events.
-
-    Arguments:
-      default - result if the query fails
-    """
-
-    # effective relayed bandwidth is the minimum of BandwidthRate,
-    # MaxAdvertisedBandwidth, and RelayBandwidthRate (if set)
-
-    effective_rate = int(self.get_option("BandwidthRate", None))
-
-    relay_rate = self.get_option("RelayBandwidthRate", None)
-
-    if relay_rate and relay_rate != "0":
-      effective_rate = min(effective_rate, int(relay_rate))
-
-    max_advertised = self.get_option("MaxAdvertisedBandwidth", None)
-
-    if max_advertised:
-      effective_rate = min(effective_rate, int(max_advertised))
-
-    if effective_rate is not None:
-      return effective_rate
-    else:
-      return default
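-
-  # Illustrative sketch (not part of the original class): with a BandwidthRate
-  # of 5242880 (5 MB/s), a RelayBandwidthRate of 1048576 (1 MB/s), and a
-  # MaxAdvertisedBandwidth left at its much larger default, the effective rate
-  # provided is 1048576.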
-
-  def get_my_bandwidth_burst(self, default = None):
-    """
-    Provides the effective bandwidth burst rate of this relay. Currently this
-    doesn't account for SETCONF events.
-
-    Arguments:
-      default - result if the query fails
-    """
-
-    # effective burst (same for BandwidthBurst and RelayBandwidthBurst)
-    effective_burst = int(self.get_option("BandwidthBurst", None))
-
-    relay_burst = self.get_option("RelayBandwidthBurst", None)
-
-    if relay_burst and relay_burst != "0":
-      effective_burst = min(effective_burst, int(relay_burst))
-
-    if effective_burst is not None:
-      return effective_burst
-    else:
-      return default
-
-  def get_my_bandwidth_observed(self, default = None):
-    """
-    Provides the relay's current observed bandwidth (the throughput determined
-    from historical measurements on the client side). This is used in the
-    path selection heuristic if the measured bandwidth is undefined.
-    This is fetched from the descriptors and hence will get stale if
-    descriptors aren't periodically updated.
-
-    Arguments:
-      default - result if the query fails
-    """
-
-    my_fingerprint = self.get_info("fingerprint", None)
-
-    if my_fingerprint:
-      my_descriptor = self.controller.get_server_descriptor(my_fingerprint)
-
-      if my_descriptor:
-        return my_descriptor.observed_bandwidth
-
-    return default
-
-  def get_my_bandwidth_measured(self, default = None):
-    """
-    Provides the relay's current measured bandwidth (the throughput as noted by
-    the directory authorities and used by clients for relay selection). This is
-    undefined if not in the consensus or with older versions of Tor. Depending
-    on the circumstances this can be from a variety of things (observed,
-    measured, weighted measured, etc) as described by:
-    https://trac.torproject.org/projects/tor/ticket/1566
-
-    Arguments:
-      default - result if the query fails
-    """
-
-    # TODO: Tor is documented as providing v2 router status entries but
-    # actually looks to be v3. This needs to be sorted out between stem
-    # and tor.
-
-    my_fingerprint = self.get_info("fingerprint", None)
-
-    if my_fingerprint:
-      my_status_entry = self.controller.get_network_status(my_fingerprint)
-
-      if my_status_entry and hasattr(my_status_entry, 'bandwidth'):
-        return my_status_entry.bandwidth
-
-    return default
-
-  def get_my_flags(self, default = None):
-    """
-    Provides the flags held by this relay.
-
-    Arguments:
-      default - result if the query fails or this relay isn't a part of the consensus yet
-    """
-
-    my_fingerprint = self.get_info("fingerprint", None)
-
-    if my_fingerprint:
-      my_status_entry = self.controller.get_network_status(my_fingerprint)
-
-      if my_status_entry:
-        return my_status_entry.flags
-
-    return default
-
-  def get_version(self):
-    """
-    Provides the version of our tor instance, this is None if we don't have a
-    connection.
-    """
-
-    self.conn_lock.acquire()
-
-    try:
-      return self.controller.get_version()
-    except stem.SocketClosed:
-      self.close()
-      return None
-    except:
-      return None
-    finally:
-      self.conn_lock.release()
-
-  def is_geoip_unavailable(self):
-    """
-    Provides true if we've concluded that our geoip database is unavailable,
-    false otherwise.
-    """
-
-    if self.is_alive():
-      return self.controller.is_geoip_unavailable()
-    else:
-      return False
-
-  def get_my_user(self):
-    """
-    Provides the user this process is running under. If unavailable this
-    provides None.
-    """
-
-    return self.controller.get_user(None)
-
-  def get_my_file_descriptor_usage(self):
-    """
-    Provides the number of file descriptors currently being used by this
-    process. This returns None if this can't be determined.
-    """
-
-    # The file descriptor usage is the size of the '/proc/<pid>/fd' contents
-    # http://linuxshellaccount.blogspot.com/2008/06/finding-number-of-open-file-descriptors.html
-    # I'm not sure about other platforms (like BSD) so erroring out there.
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive() and proc.is_available():
-      my_pid = self.controller.get_pid(None)
-
-      if my_pid:
-        try:
-          result = len(os.listdir("/proc/%s/fd" % my_pid))
-        except:
-          pass
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_my_file_descriptor_limit(self):
-    """
-    Provides the maximum number of file descriptors this process can have.
-    Only the Tor process itself reliably knows this value, and the option for
-    getting this was added in Tor 0.2.3.x-final. If that's unavailable then
-    we can only estimate the file descriptor limit based on other factors.
-
-    The return result is a tuple of the form:
-    (file_desc_limit, is_estimate)
-    and if all methods fail then both values are None.
-    """
-
-    # provides -1 if the query fails
-    queried_limit = self.get_info("process/descriptor-limit", None)
-
-    if queried_limit is not None and queried_limit != "-1":
-      return (int(queried_limit), False)
-
-    tor_user = self.get_my_user()
-
-    # This is guessing the open file limit. Unfortunately there's no way
-    # (other than "/usr/proc/bin/pfiles pid | grep rlimit" under Solaris)
-    # to get the file descriptor limit for an arbitrary process.
-
-    if tor_user == "debian-tor":
-      # probably loaded via /etc/init.d/tor which changes descriptor limit
-      return (8192, True)
-    else:
-      # uses ulimit to estimate (-H is for hard limit, which is what tor uses)
-      ulimit_results = system.call("ulimit -Hn")
-
-      if ulimit_results:
-        ulimit = ulimit_results[0].strip()
-
-        if ulimit.isdigit():
-          return (int(ulimit), True)
-
-    return (None, None)
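-
-  # Illustrative sketch (not part of the original class): a tor that answers
-  # "GETINFO process/descriptor-limit" yields an exact result such as
-  # (32768, False); otherwise this falls back to an estimate like (8192, True)
-  # for the debian-tor user or the local "ulimit -Hn" value.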
-
-  def get_start_time(self):
-    """
-    Provides the unix time for when the tor process first started. If this
-    can't be determined then this provides None.
-    """
-
-    try:
-      return system.get_start_time(self.controller.get_pid())
-    except:
-      return None
-
-  def is_exiting_allowed(self, ip_address, port):
-    """
-    Checks if the given destination can be exited to by this relay, returning
-    True if so and False otherwise.
-    """
-
-    self.conn_lock.acquire()
-
-    result = False
-
-    if self.is_alive():
-      # If we allow any exiting then this could be relayed DNS queries,
-      # otherwise the policy is checked. Tor still makes DNS connections to
-      # test when exiting isn't allowed, but nothing is relayed over them.
-      # I'm registering these as non-exiting to avoid likely user confusion:
-      # https://trac.torproject.org/projects/tor/ticket/965
-
-      our_policy = self.get_exit_policy()
-
-      if our_policy and our_policy.is_exiting_allowed() and port == "53":
-        result = True
-      else:
-        result = our_policy and our_policy.can_exit_to(ip_address, port)
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_exit_policy(self):
-    """
-    Provides an ExitPolicy instance for the head of this relay's exit policy
-    chain. If there's no active connection then this provides None.
-    """
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive():
-      try:
-        result = self.controller.get_exit_policy(None)
-      except:
-        pass
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_consensus_entry(self, relay_fingerprint):
-    """
-    Provides the most recently available consensus information for the given
-    relay. This is none if no such information exists.
-
-    Arguments:
-      relay_fingerprint - fingerprint of the relay
-    """
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive():
-      if not relay_fingerprint in self._consensus_lookup_cache:
-        ns_entry = self.get_info("ns/id/%s" % relay_fingerprint, None)
-        self._consensus_lookup_cache[relay_fingerprint] = ns_entry
-
-      result = self._consensus_lookup_cache[relay_fingerprint]
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_descriptor_entry(self, relay_fingerprint):
-    """
-    Provides the most recently available descriptor information for the given
-    relay. Unless FetchUselessDescriptors is set this may frequently be
-    unavailable. If no such descriptor is available then this returns None.
-
-    Arguments:
-      relay_fingerprint - fingerprint of the relay
-    """
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive():
-      if not relay_fingerprint in self._descriptor_lookup_cache:
-        desc_entry = self.get_info("desc/id/%s" % relay_fingerprint, None)
-        self._descriptor_lookup_cache[relay_fingerprint] = desc_entry
-
-      result = self._descriptor_lookup_cache[relay_fingerprint]
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_relay_fingerprint(self, relay_address, relay_port = None, get_all_matches = False):
-    """
-    Provides the fingerprint associated with the given address. If there are
-    multiple potential matches or the mapping is unknown then this returns
-    None. This disambiguates the fingerprint if there are multiple relays on
-    the same ip address by several methods, one of them being to pick relays
-    we have a connection with.
-
-    Arguments:
-      relay_address  - address of relay to be returned
-      relay_port     - orport of relay (to further narrow the results)
-      get_all_matches - ignores the relay_port and provides all of the
-                      (port, fingerprint) tuples matching the given
-                      address
-    """
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive():
-      if get_all_matches:
-        # populates the ip -> fingerprint mappings if not yet available
-        if self._fingerprint_mappings is None:
-          self._fingerprint_mappings = self._get_fingerprint_mappings()
-
-        if relay_address in self._fingerprint_mappings:
-          result = self._fingerprint_mappings[relay_address]
-        else:
-          result = []
-      else:
-        # query the fingerprint if it isn't yet cached
-        if not (relay_address, relay_port) in self._fingerprint_lookup_cache:
-          relay_fingerprint = self._get_relay_fingerprint(relay_address, relay_port)
-          self._fingerprint_lookup_cache[(relay_address, relay_port)] = relay_fingerprint
-
-        result = self._fingerprint_lookup_cache[(relay_address, relay_port)]
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_relay_nickname(self, relay_fingerprint):
-    """
-    Provides the nickname associated with the given relay. This provides None
-    if no such relay exists, and "Unnamed" if the name hasn't been set.
-
-    Arguments:
-      relay_fingerprint - fingerprint of the relay
-    """
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive():
-      # query the nickname if it isn't yet cached
-      if not relay_fingerprint in self._nickname_lookup_cache:
-        if relay_fingerprint == self.get_info("fingerprint", None):
-          # this is us, simply check the config
-          my_nickname = self.get_option("Nickname", "Unnamed")
-          self._nickname_lookup_cache[relay_fingerprint] = my_nickname
-        else:
-          ns_entry = self.controller.get_network_status(relay_fingerprint, None)
-
-          if ns_entry:
-            self._nickname_lookup_cache[relay_fingerprint] = ns_entry.nickname
-
-      result = self._nickname_lookup_cache[relay_fingerprint]
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_relay_exit_policy(self, relay_fingerprint):
-    """
-    Provides the ExitPolicy instance associated with the given relay. The tor
-    consensus entries don't indicate whether private addresses are rejected,
-    nor do they include address-specific policies, so this is only used as a
-    fallback if a recent descriptor is unavailable. This returns None if
-    unable to determine the policy.
-
-    Arguments:
-      relay_fingerprint - fingerprint of the relay
-    """
-
-    self.conn_lock.acquire()
-
-    result = None
-
-    if self.is_alive():
-      # attempts to fetch the policy via the descriptor
-      descriptor = self.controller.get_server_descriptor(relay_fingerprint, None)
-
-      if descriptor:
-        result = descriptor.exit_policy
-
-    self.conn_lock.release()
-
-    return result
-
-  def get_relay_address(self, relay_fingerprint, default = None):
-    """
-    Provides the (IP Address, ORPort) tuple for a given relay. If the lookup
-    fails then this returns the default.
-
-    Arguments:
-      relay_fingerprint - fingerprint of the relay
-    """
-
-    self.conn_lock.acquire()
-
-    result = default
-
-    if self.is_alive():
-      # query the address if it isn't yet cached
-      if not relay_fingerprint in self._address_lookup_cache:
-        if relay_fingerprint == self.get_info("fingerprint", None):
-          # this is us, simply check the config
-          my_address = self.get_info("address", None)
-          my_or_port = self.get_option("ORPort", None)
-
-          if my_address and my_or_port:
-            self._address_lookup_cache[relay_fingerprint] = (my_address, my_or_port)
-        else:
-          # check the consensus for the relay
-          ns_entry = self.get_consensus_entry(relay_fingerprint)
-
-          if ns_entry:
-            ns_line_comp = ns_entry.split("\n")[0].split(" ")
-
-            if len(ns_line_comp) >= 8:
-              self._address_lookup_cache[relay_fingerprint] = (ns_line_comp[6], ns_line_comp[7])
-
-      result = self._address_lookup_cache.get(relay_fingerprint, default)
-
-    self.conn_lock.release()
-
-    return result
-
-  def add_event_listener(self, listener, *event_types):
-    """
-    Directs further tor controller events to callback functions of the
-    listener. If a new control connection is initialized then this listener is
-    reattached.
-    """
-
-    self.conn_lock.acquire()
-
-    if self.is_alive():
-      self.controller.add_event_listener(listener, *event_types)
-
-    self.conn_lock.release()
-
-  def remove_event_listener(self, listener):
-    """
-    Stops the given event listener from being notified of further events.
-    """
-
-    self.conn_lock.acquire()
-
-    if self.is_alive():
-      self.controller.remove_event_listener(listener)
-
-    self.conn_lock.release()
-
-  def add_status_listener(self, callback):
-    """
-    Directs further events related to tor's controller status to the callback
-    function.
-
-    Arguments:
-      callback - functor that'll accept the events, expected to be of the form:
-                 my_function(controller, event_type)
-    """
-
-    self.controller.add_status_listener(callback)
-
-  def reload(self):
-    """
-    This resets tor (sending a RELOAD signal to the control port) causing tor's
-    internal state to be reset and the torrc reloaded.
-    """
-
-    self.conn_lock.acquire()
-
-    try:
-      if self.is_alive():
-        try:
-          self.controller.signal(stem.Signal.RELOAD)
-        except Exception as exc:
-          # new torrc parameters caused an error (tor's likely shut down)
-          raise IOError(str(exc))
-    finally:
-      self.conn_lock.release()
-
-  def shutdown(self, force = False):
-    """
-    Sends a shutdown signal to the attached tor instance. For relays the
-    actual shutdown is delayed for thirty seconds unless the force flag is
-    given. This raises an IOError if a signal is sent but fails.
-
-    Arguments:
-      force - triggers an immediate shutdown for relays if True
-    """
-
-    self.conn_lock.acquire()
-
-    raised_exception = None
-
-    if self.is_alive():
-      try:
-        is_relay = self.get_option("ORPort", None) is not None
-
-        if force:
-          self.controller.signal(stem.Signal.HALT)
-        else:
-          self.controller.signal(stem.Signal.SHUTDOWN)
-
-        # shuts down control connection if we aren't making a delayed shutdown
-
-        if force or not is_relay:
-          self.close()
-      except Exception as exc:
-        raised_exception = IOError(str(exc))
-
-    self.conn_lock.release()
-
-    if raised_exception:
-      raise raised_exception
-
-  def ns_event(self, event):
-    self._consensus_lookup_cache = {}
-
-  def new_consensus_event(self, event):
-    self.conn_lock.acquire()
-
-    # reconstructs consensus based mappings
-
-    self._fingerprint_lookup_cache = {}
-    self._nickname_lookup_cache = {}
-    self._address_lookup_cache = {}
-    self._consensus_lookup_cache = {}
-
-    if self._fingerprint_mappings is not None:
-      self._fingerprint_mappings = self._get_fingerprint_mappings(event.desc)
-
-    self.conn_lock.release()
-
-  def new_desc_event(self, event):
-    self.conn_lock.acquire()
-
-    desc_fingerprints = [fingerprint for (fingerprint, nickname) in event.relays]
-
-    # If we're tracking ip address -> fingerprint mappings then update with
-    # the new relays.
-
-    self._fingerprint_lookup_cache = {}
-    self._descriptor_lookup_cache = {}
-
-    if self._fingerprint_mappings is not None:
-      for fingerprint in desc_fingerprints:
-        # gets consensus data for the new descriptor
-
-        try:
-          desc = self.controller.get_network_status(fingerprint)
-        except stem.ControllerError:
-          continue
-
-        # updates _fingerprint_mappings with new data
-
-        if desc.address in self._fingerprint_mappings:
-          # if entry already exists with the same orport, remove it
-
-          orport_match = None
-
-          for entry_port, entry_fingerprint in self._fingerprint_mappings[desc.address]:
-            if entry_port == desc.or_port:
-              orport_match = (entry_port, entry_fingerprint)
-              break
-
-          if orport_match:
-            self._fingerprint_mappings[desc.address].remove(orport_match)
-
-          # add the new entry
-
-          self._fingerprint_mappings[desc.address].append((desc.or_port, desc.fingerprint))
-        else:
-          self._fingerprint_mappings[desc.address] = [(desc.or_port, desc.fingerprint)]
-
-    self.conn_lock.release()
-
-  def _get_fingerprint_mappings(self, descriptors = None):
-    """
-    Provides IP address to (port, fingerprint) tuple mappings for all of the
-    currently cached relays.
-
-    Arguments:
-      descriptors - router status entries (fetched if not provided)
-    """
-
-    results = {}
-
-    if self.is_alive():
-      # fetch the current network status if not provided
-
-      if not descriptors:
-        try:
-          descriptors = self.controller.get_network_statuses()
-        except stem.ControllerError:
-          descriptors = []
-
-      # construct mappings of ips to relay data
-
-      for desc in descriptors:
-        results.setdefault(desc.address, []).append((desc.or_port, desc.fingerprint))
-
-    return results
-
-  def _get_relay_fingerprint(self, relay_address, relay_port):
-    """
-    Provides the fingerprint associated with the address/port combination.
-
-    Arguments:
-      relay_address - address of relay to be returned
-      relay_port    - orport of relay (to further narrow the results)
-    """
-
-    # If we were provided with a string port then convert to an int (so
-    # lookups won't mismatch based on type).
-
-    if isinstance(relay_port, str):
-      relay_port = int(relay_port)
-
-    # checks if this matches us
-
-    if relay_address == self.get_info("address", None):
-      if not relay_port or relay_port == self.get_option("ORPort", None):
-        return self.get_info("fingerprint", None)
-
-    # if we haven't yet populated the ip -> fingerprint mappings then do so
-
-    if self._fingerprint_mappings is None:
-      self._fingerprint_mappings = self._get_fingerprint_mappings()
-
-    potential_matches = self._fingerprint_mappings.get(relay_address)
-
-    if not potential_matches:
-      return None  # no relay matches this ip address
-
-    if len(potential_matches) == 1:
-      # There's only one relay belonging to this ip address. If the port
-      # matches then we're done.
-
-      match = potential_matches[0]
-
-      if relay_port and match[0] != relay_port:
-        return None
-      else:
-        return match[1]
-    elif relay_port:
-      # Multiple potential matches, so trying to match based on the port.
-      for entry_port, entry_fingerprint in potential_matches:
-        if entry_port == relay_port:
-          return entry_fingerprint
-
-    return None
diff --git a/arm/util/tor_config.py b/arm/util/tor_config.py
new file mode 100644
index 0000000..46db391
--- /dev/null
+++ b/arm/util/tor_config.py
@@ -0,0 +1,1301 @@
+"""
+Helper functions for working with tor's configuration file.
+"""
+
+import os
+import time
+import socket
+import threading
+
+import stem.version
+
+from arm.util import tor_tools, ui_tools
+
+from stem.util import conf, enum, log, str_tools, system
+
+# filename used for cached tor config descriptions
+
+CONFIG_DESC_FILENAME = "torConfigDesc.txt"
+
+# messages related to loading the tor configuration descriptions
+
+DESC_LOAD_SUCCESS_MSG = "Loaded configuration descriptions from '%s' (runtime: %0.3f)"
+DESC_LOAD_FAILED_MSG = "Unable to load configuration descriptions (%s)"
+DESC_INTERNAL_LOAD_SUCCESS_MSG = "Falling back to descriptions for Tor %s"
+DESC_INTERNAL_LOAD_FAILED_MSG = "Unable to load fallback descriptions. Categories and help for Tor's configuration options won't be available. (%s)"
+DESC_READ_MAN_SUCCESS_MSG = "Read descriptions for tor's configuration options from its man page (runtime %0.3f)"
+DESC_READ_MAN_FAILED_MSG = "Unable to get the descriptions of Tor's configuration options from its man page (%s)"
+DESC_SAVE_SUCCESS_MSG = "Saved configuration descriptions to '%s' (runtime: %0.3f)"
+DESC_SAVE_FAILED_MSG = "Unable to save configuration descriptions (%s)"
+
+
+def conf_handler(key, value):
+  if key == "torrc.important":
+    # stores lowercase entries to drop case sensitivity
+    return [entry.lower() for entry in value]
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.torrc.validate": True,
+  "torrc.important": [],
+  "torrc.alias": {},
+  "torrc.units.size.b": [],
+  "torrc.units.size.kb": [],
+  "torrc.units.size.mb": [],
+  "torrc.units.size.gb": [],
+  "torrc.units.size.tb": [],
+  "torrc.units.time.sec": [],
+  "torrc.units.time.min": [],
+  "torrc.units.time.hour": [],
+  "torrc.units.time.day": [],
+  "torrc.units.time.week": [],
+  "startup.data_directory": "~/.arm",
+  "features.config.descriptions.enabled": True,
+  "features.config.descriptions.persist": True,
+  "tor.chroot": '',
+}, conf_handler)
+
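+# Illustrative armrc fragment (hypothetical values) showing how the
+# torrc.units.* lists above could be populated so that unit suffixes such as
+# "gb" or "minutes" are recognized when parsing torrc values:
+#
+#   torrc.units.size.kb kb, kbyte, kbytes
+#   torrc.units.size.gb gb, gbyte, gbytes
+#   torrc.units.time.min minute, minutes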
+
+def general_conf_handler(config, key):
+  value = config.get(key)
+
+  if key.startswith("torrc.summary."):
+    # we'll look for summary keys with a lowercase config name
+    CONFIG[key.lower()] = value
+  elif key.startswith("torrc.units.") and value:
+    # all the torrc.units.* values are comma separated lists
+    return [entry.strip() for entry in value[0].split(",")]
+
+
+conf.get_config("arm").add_listener(general_conf_handler, backfill = True)
+
+# enums and values for numeric torrc entries
+
+ValueType = enum.Enum("UNRECOGNIZED", "SIZE", "TIME")
+SIZE_MULT = {"b": 1, "kb": 1024, "mb": 1048576, "gb": 1073741824, "tb": 1099511627776}
+TIME_MULT = {"sec": 1, "min": 60, "hour": 3600, "day": 86400, "week": 604800}
+
+# enums for issues found during torrc validation:
+# DUPLICATE  - entry is ignored due to being a duplicate
+# MISMATCH   - the value doesn't match tor's current state
+# MISSING    - value differs from its default but is missing from the torrc
+# IS_DEFAULT - the configuration option matches tor's default
+
+ValidationError = enum.Enum("DUPLICATE", "MISMATCH", "MISSING", "IS_DEFAULT")
+
+# descriptions of tor's configuration options fetched from its man page
+
+CONFIG_DESCRIPTIONS_LOCK = threading.RLock()
+CONFIG_DESCRIPTIONS = {}
+
+# categories for tor configuration options
+
+Category = enum.Enum("GENERAL", "CLIENT", "RELAY", "DIRECTORY", "AUTHORITY", "HIDDEN_SERVICE", "TESTING", "UNKNOWN")
+
+TORRC = None  # singleton torrc instance
+MAN_OPT_INDENT = 7  # indentation before options in the man page
+MAN_EX_INDENT = 15  # indentation used for man page examples
+PERSIST_ENTRY_DIVIDER = "-" * 80 + "\n"  # splits config entries when saving to a file
+MULTILINE_PARAM = None  # cached multiline parameters (lazily loaded)
+
+# torrc options that bind to ports
+
+PORT_OPT = ("SocksPort", "ORPort", "DirPort", "ControlPort", "TransPort")
+
+
+class ManPageEntry:
+  """
+  Information provided about a tor configuration option in its man page entry.
+  """
+
+  def __init__(self, option, index, category, arg_usage, description):
+    self.option = option
+    self.index = index
+    self.category = category
+    self.arg_usage = arg_usage
+    self.description = description
+
+
+def get_torrc():
+  """
+  Singleton constructor for a Torrc instance. Be aware that this starts as
+  being unloaded, needing the torrc contents to be loaded before it's
+  functional.
+  """
+
+  global TORRC
+
+  if TORRC is None:
+    TORRC = Torrc()
+
+  return TORRC
+
+
+def load_option_descriptions(load_path = None, check_version = True):
+  """
+  Fetches and parses descriptions for tor's configuration options from its man
+  page. This can be a somewhat lengthy call, and raises an IOError if issues
+  occur. When successfully loading from a file this returns the version of the
+  contents loaded.
+
+  If available, this can load the configuration descriptions from a file where
+  they were previously persisted to cut down on the load time (latency for this
+  is around 200ms).
+
+  Arguments:
+    load_path     - if set, this attempts to fetch the configuration
+                   descriptions from the given path instead of the man page
+    check_version - discards the results if true and tor's version doesn't
+                   match the cached descriptors, otherwise accepts anyway
+  """
+
+  CONFIG_DESCRIPTIONS_LOCK.acquire()
+  CONFIG_DESCRIPTIONS.clear()
+
+  raised_exc = None
+  loaded_version = ""
+
+  try:
+    if load_path:
+      # Input file is expected to be of the form:
+      # <option>
+      # <arg description>
+      # <description, possibly multiple lines>
+      # <PERSIST_ENTRY_DIVIDER>
+      input_file = open(load_path, "r")
+      input_file_contents = input_file.readlines()
+      input_file.close()
+
+      try:
+        version_line = input_file_contents.pop(0).rstrip()
+
+        if version_line.startswith("Tor Version "):
+          file_version = version_line[12:]
+          loaded_version = file_version
+          tor_version = tor_tools.get_conn().get_info("version", "")
+
+          if check_version and file_version != tor_version:
+            msg = "wrong version, tor is %s but the file's from %s" % (tor_version, file_version)
+            raise IOError(msg)
+        else:
+          raise IOError("unable to parse version")
+
+        while input_file_contents:
+          # gets category enum, failing if it doesn't exist
+          category = input_file_contents.pop(0).rstrip()
+
+          if category not in Category:
+            base_msg = "invalid category in input file: '%s'"
+            raise IOError(base_msg % category)
+
+          # gets the position in the man page
+          index_arg, index_str = -1, input_file_contents.pop(0).rstrip()
+
+          if index_str.startswith("index: "):
+            index_str = index_str[7:]
+
+            if index_str.isdigit():
+              index_arg = int(index_str)
+            else:
+              raise IOError("non-numeric index value: %s" % index_str)
+          else:
+            raise IOError("malformed index argument: %s" % index_str)
+
+          option = input_file_contents.pop(0).rstrip()
+          argument = input_file_contents.pop(0).rstrip()
+
+          description, loaded_line = "", input_file_contents.pop(0)
+
+          while loaded_line != PERSIST_ENTRY_DIVIDER:
+            description += loaded_line
+
+            if input_file_contents:
+              loaded_line = input_file_contents.pop(0)
+            else:
+              break
+
+          CONFIG_DESCRIPTIONS[option.lower()] = ManPageEntry(option, index_arg, category, argument, description.rstrip())
+      except IndexError:
+        CONFIG_DESCRIPTIONS.clear()
+        raise IOError("input file format is invalid")
+    else:
+      man_call_results = system.call("man tor", None)
+
+      if not man_call_results:
+        raise IOError("man page not found")
+
+      # Fetches all options available with this tor instance. This isn't
+      # vital, and the valid_options are left empty if the call fails.
+
+      conn, valid_options = tor_tools.get_conn(), []
+      config_option_query = conn.get_info("config/names", None)
+
+      if config_option_query:
+        for line in config_option_query.strip().split("\n"):
+          valid_options.append(line[:line.find(" ")].lower())
+
+      option_count, last_option, last_arg = 0, None, None
+      last_category, last_description = Category.GENERAL, ""
+
+      for line in man_call_results:
+        line = ui_tools.get_printable(line)
+        stripped_line = line.strip()
+
+        # we have content, but an indent less than an option (ignore line)
+        #if stripped_line and not line.startswith(" " * MAN_OPT_INDENT): continue
+
+        # line starts with an indent equivalent to a new config option
+
+        is_opt_indent = line.startswith(" " * MAN_OPT_INDENT) and line[MAN_OPT_INDENT] != " "
+
+        is_category_line = not line.startswith(" ") and "OPTIONS" in line
+
+        # if this is a category header or a new option, add an entry using the
+        # buffered results
+
+        if is_opt_indent or is_category_line:
+          # Filters the line based on if the option is recognized by tor or
+          # not. This isn't necessary for arm, so if unable to make the check
+          # then we skip filtering (no loss, the map will just have some extra
+          # noise).
+
+          stripped_description = last_description.strip()
+
+          if last_option and (not valid_options or last_option.lower() in valid_options):
+            CONFIG_DESCRIPTIONS[last_option.lower()] = ManPageEntry(last_option, option_count, last_category, last_arg, stripped_description)
+            option_count += 1
+
+          last_description = ""
+
+          # parses the option and argument
+
+          line = line.strip()
+          div_index = line.find(" ")
+
+          if div_index != -1:
+            last_option, last_arg = line[:div_index], line[div_index + 1:]
+
+          # if this is a category header then switch it
+
+          if is_category_line:
+            if line.startswith("OPTIONS"):
+              last_category = Category.GENERAL
+            elif line.startswith("CLIENT"):
+              last_category = Category.CLIENT
+            elif line.startswith("SERVER"):
+              last_category = Category.RELAY
+            elif line.startswith("DIRECTORY SERVER"):
+              last_category = Category.DIRECTORY
+            elif line.startswith("DIRECTORY AUTHORITY SERVER"):
+              last_category = Category.AUTHORITY
+            elif line.startswith("HIDDEN SERVICE"):
+              last_category = Category.HIDDEN_SERVICE
+            elif line.startswith("TESTING NETWORK"):
+              last_category = Category.TESTING
+            else:
+              log.notice("Unrecognized category in the man page: %s" % line.strip())
+        else:
+          # Appends the text to the running description. Empty lines and lines
+          # starting with a specific indentation are used for formatting, for
+          # instance the ExitPolicy and TestingTorNetwork entries.
+
+          if last_description and last_description[-1] != "\n":
+            last_description += " "
+
+          if not stripped_line:
+            last_description += "\n\n"
+          elif line.startswith(" " * MAN_EX_INDENT):
+            last_description += "    %s\n" % stripped_line
+          else:
+            last_description += stripped_line
+  except IOError as exc:
+    raised_exc = exc
+
+  CONFIG_DESCRIPTIONS_LOCK.release()
+
+  if raised_exc:
+    raise raised_exc
+  else:
+    return loaded_version
+
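+# Illustrative sketch (hypothetical entry) of the persisted description cache
+# that load_option_descriptions() parses above: a version line, then for each
+# entry its category, man page index, option, argument usage, and description,
+# with entries separated by PERSIST_ENTRY_DIVIDER...
+#
+#   Tor Version 0.2.4.20
+#   GENERAL
+#   index: 0
+#   BandwidthRate
+#   N bytes|KBytes|MBytes|GBytes
+#   A token bucket limits the average incoming bandwidth usage on this node.
+#   --------------------------------------------------------------------------------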
+
+def save_option_descriptions(path):
+  """
+  Preserves the current configuration descriptors to the given path. This
+  raises an IOError or OSError if unable to do so.
+
+  Arguments:
+    path - location to persist configuration descriptors
+  """
+
+  # make dir if the path doesn't already exist
+
+  base_dir = os.path.dirname(path)
+
+  if not os.path.exists(base_dir):
+    os.makedirs(base_dir)
+
+  output_file = open(path, "w")
+
+  CONFIG_DESCRIPTIONS_LOCK.acquire()
+  sorted_options = CONFIG_DESCRIPTIONS.keys()
+  sorted_options.sort()
+
+  tor_version = tor_tools.get_conn().get_info("version", "")
+  output_file.write("Tor Version %s\n" % tor_version)
+
+  for i in range(len(sorted_options)):
+    man_entry = get_config_description(sorted_options[i])
+    output_file.write("%s\nindex: %i\n%s\n%s\n%s\n" % (man_entry.category, man_entry.index, man_entry.option, man_entry.arg_usage, man_entry.description))
+
+    if i != len(sorted_options) - 1:
+      output_file.write(PERSIST_ENTRY_DIVIDER)
+
+  output_file.close()
+  CONFIG_DESCRIPTIONS_LOCK.release()
+
+
+def get_config_summary(option):
+  """
+  Provides a short summary description of the configuration option. If none is
+  known then this provides None.
+
+  Arguments:
+    option - tor config option
+  """
+
+  return CONFIG.get("torrc.summary.%s" % option.lower())
+
+
+def is_important(option):
+  """
+  Provides True if the option has the 'important' flag in the configuration,
+  False otherwise.
+
+  Arguments:
+    option - tor config option
+  """
+
+  return option.lower() in CONFIG["torrc.important"]
+
+
+def get_config_description(option):
+  """
+  Provides ManPageEntry instances populated with information fetched from the
+  tor man page. This provides None if no such option has been loaded. If the
+  man page is in the process of being loaded then this call blocks until it
+  finishes.
+
+  Arguments:
+    option - tor config option
+  """
+
+  CONFIG_DESCRIPTIONS_LOCK.acquire()
+
+  if option.lower() in CONFIG_DESCRIPTIONS:
+    return_val = CONFIG_DESCRIPTIONS[option.lower()]
+  else:
+    return_val = None
+
+  CONFIG_DESCRIPTIONS_LOCK.release()
+  return return_val
+
+
+def get_config_options():
+  """
+  Provides the configuration options from the loaded man page. This is an empty
+  list if no man page has been loaded.
+  """
+
+  CONFIG_DESCRIPTIONS_LOCK.acquire()
+
+  return_val = [CONFIG_DESCRIPTIONS[opt].option for opt in CONFIG_DESCRIPTIONS]
+
+  CONFIG_DESCRIPTIONS_LOCK.release()
+  return return_val
+
+
+def get_config_location():
+  """
+  Provides the location of the torrc, raising an IOError with the reason if the
+  path can't be determined.
+  """
+
+  conn = tor_tools.get_conn()
+  config_location = conn.get_info("config-file", None)
+  tor_pid, tor_prefix = conn.controller.get_pid(None), CONFIG['tor.chroot']
+
+  if not config_location:
+    raise IOError("unable to query the torrc location")
+
+  try:
+    tor_cwd = system.get_cwd(tor_pid)
+    return tor_prefix + system.expand_path(config_location, tor_cwd)
+  except IOError as exc:
+    raise IOError("querying tor's pwd failed because %s" % exc)
+
+
+def get_multiline_parameters():
+  """
+  Provides parameters that can be defined multiple times in the torrc without
+  overwriting the value.
+  """
+
+  # fetches config options with the LINELIST (aka 'LineList'), LINELIST_S (aka
+  # 'Dependent'), and LINELIST_V (aka 'Virtual') types
+
+  global MULTILINE_PARAM
+
+  if MULTILINE_PARAM is None:
+    conn, multiline_entries = tor_tools.get_conn(), []
+
+    config_option_query = conn.get_info("config/names", None)
+
+    if config_option_query:
+      for line in config_option_query.strip().split("\n"):
+        conf_option, conf_type = line.strip().split(" ", 1)
+
+        if conf_type in ("LineList", "Dependant", "Virtual"):
+          multiline_entries.append(conf_option)
+    else:
+      # unable to query tor connection, so not caching results
+      return ()
+
+    MULTILINE_PARAM = multiline_entries
+
+  return tuple(MULTILINE_PARAM)
+
+
+def get_custom_options(include_value = False):
+  """
+  Provides the torrc parameters that differ from their defaults.
+
+  Arguments:
+    include_value - provides the current value with results if true, otherwise
+                   this just contains the options
+  """
+
+  config_text = tor_tools.get_conn().get_info("config-text", "").strip()
+  config_lines = config_text.split("\n")
+
+  # removes any duplicates
+
+  config_lines = list(set(config_lines))
+
+  # The "GETINFO config-text" query only provides options that differ
+  # from Tor's defaults with the exception of its Log and Nickname entries
+  # which, even if undefined, are still returned ("Log notice stdout") as per:
+  # https://trac.torproject.org/projects/tor/ticket/2362
+  #
+  # If this is from the deb then it will be "Log notice file /var/log/tor/log"
+  # due to special patching applied to it, as per:
+  # https://trac.torproject.org/projects/tor/ticket/4602
+
+  try:
+    config_lines.remove("Log notice stdout")
+  except ValueError:
+    pass
+
+  try:
+    config_lines.remove("Log notice file /var/log/tor/log")
+  except ValueError:
+    pass
+
+  try:
+    config_lines.remove("Nickname %s" % socket.gethostname())
+  except ValueError:
+    pass
+
+  if include_value:
+    return config_lines
+  else:
+    return [line[:line.find(" ")] for line in config_lines]
+
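+# Illustrative usage (hypothetical torrc contents): if the only non-default
+# settings are "ControlPort 9051" and "ORPort 443" then...
+#
+#   get_custom_options()      => ["ControlPort", "ORPort"]
+#   get_custom_options(True)  => ["ControlPort 9051", "ORPort 443"]
+#
+# ... though ordering isn't guaranteed since duplicate lines are dropped via a
+# set.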
+
+def save_conf(destination = None, contents = None):
+  """
+  Saves the configuration to the given path. If this is equivalent to
+  issuing a SAVECONF (the contents and destination match what tor's using)
+  then that's done. Otherwise, this writes the contents directly. This raises
+  an IOError if unsuccessful.
+
+  Arguments:
+    destination - path to be saved to, the current config location if None
+    contents    - configuration to be saved, the current config if None
+  """
+
+  if destination:
+    destination = os.path.abspath(destination)
+
+  # fills default config values, and sets is_saveconf to false if they differ
+  # from the arguments
+
+  is_saveconf, start_time = True, time.time()
+
+  current_config = get_custom_options(True)
+
+  if not contents:
+    contents = current_config
+  else:
+    is_saveconf &= contents == current_config
+
+  # The "GETINFO config-text" option was introduced in Tor version 0.2.2.7. If
+  # we're writing custom contents then this is fine, but if we're trying to
+  # save the current configuration then we need to fail if it's unavailable.
+  # Otherwise we'd write a blank torrc as per...
+  # https://trac.torproject.org/projects/tor/ticket/3614
+
+  if contents == ['']:
+    # double check that "GETINFO config-text" is unavailable rather than just
+    # giving an empty result
+
+    if tor_tools.get_conn().get_info("config-text", None) is None:
+      raise IOError("determining the torrc requires Tor version 0.2.2.7")
+
+  current_location = None
+
+  try:
+    current_location = get_config_location()
+
+    if not destination:
+      destination = current_location
+    else:
+      is_saveconf &= destination == current_location
+  except IOError:
+    pass
+
+  if not destination:
+    raise IOError("unable to determine the torrc's path")
+
+  log_msg = "Saved config by %%s to %s (runtime: %%0.4f)" % destination
+
+  # attempts SAVECONF if we're updating our torrc with the current state
+
+  if is_saveconf:
+    try:
+      tor_tools.get_conn().save_conf()
+
+      try:
+        get_torrc().load()
+      except IOError:
+        pass
+
+      log.debug(log_msg % ("SAVECONF", time.time() - start_time))
+      return  # if successful then we're done
+    except:
+      pass
+
+  # if the SAVECONF fails or this is a custom save then write contents directly
+
+  try:
+    # make dir if the path doesn't already exist
+
+    base_dir = os.path.dirname(destination)
+
+    if not os.path.exists(base_dir):
+      os.makedirs(base_dir)
+
+    # saves the configuration to the file
+
+    config_file = open(destination, "w")
+    config_file.write("\n".join(contents))
+    config_file.close()
+  except (IOError, OSError) as exc:
+    raise IOError(exc)
+
+  # reloads the cached torrc if overwriting it
+
+  if destination == current_location:
+    try:
+      get_torrc().load()
+    except IOError:
+      pass
+
+  log.debug(log_msg % ("directly writing", time.time() - start_time))
+
+
+def validate(contents = None):
+  """
+  Performs validation on the given torrc contents, providing back a listing of
+  (line number, issue, msg) tuples for issues found. If the issue occurs on a
+  multiline torrc entry then the line number is for the last line of the entry.
+
+  Arguments:
+    contents - torrc contents
+  """
+
+  conn = tor_tools.get_conn()
+  custom_options = get_custom_options()
+  issues_found, seen_options = [], []
+
+  # Strips comments and collapses multi-line entries, for more
+  # information see:
+  # https://trac.torproject.org/projects/tor/ticket/1929
+
+  stripped_contents, multiline_buffer = [], ""
+
+  for line in _strip_comments(contents):
+    if not line:
+      stripped_contents.append("")
+    else:
+      line = multiline_buffer + line
+      multiline_buffer = ""
+
+      if line.endswith("\\"):
+        multiline_buffer = line[:-1]
+        stripped_contents.append("")
+      else:
+        stripped_contents.append(line.strip())
+
+  for line_number in range(len(stripped_contents) - 1, -1, -1):
+    line_text = stripped_contents[line_number]
+
+    if not line_text:
+      continue
+
+    line_comp = line_text.split(None, 1)
+
+    if len(line_comp) == 2:
+      option, value = line_comp
+    else:
+      option, value = line_text, ""
+
+    # Tor is case insensitive when parsing its torrc. This poses a bit of an
+    # issue for us because we want all of our checks to be case insensitive
+    # too but also want messages to match the normal camel-case conventions.
+    #
+    # Using custom_options to account for this. It contains the tor reported
+    # options (camel case) and is either a matching set or the following
+    # default value check will fail. Hence using that listing to correct the
+    # case.
+    #
+    # TODO: when refactoring for stem make this less confusing...
+
+    for custom_opt in custom_options:
+      if custom_opt.lower() == option.lower():
+        option = custom_opt
+        break
+
+    # if an aliased option then use its real name
+
+    if option in CONFIG["torrc.alias"]:
+      option = CONFIG["torrc.alias"][option]
+
+    # most parameters are overwritten if defined multiple times
+
+    if option in seen_options and option not in get_multiline_parameters():
+      issues_found.append((line_number, ValidationError.DUPLICATE, option))
+      continue
+    else:
+      seen_options.append(option)
+
+    # checks if the value isn't necessary due to matching the defaults
+
+    if option not in custom_options:
+      issues_found.append((line_number, ValidationError.IS_DEFAULT, option))
+
+    # replace aliases with their recognized representation
+
+    if option in CONFIG["torrc.alias"]:
+      option = CONFIG["torrc.alias"][option]
+
+    # tor appears to replace tabs with a space, for instance:
+    # "accept\t*:563" is read back as "accept *:563"
+
+    value = value.replace("\t", " ")
+
+    # parse value if it's a size or time, expanding the units
+
+    value, value_type = _parse_conf_value(value)
+
+    # issues GETCONF to get the values tor's currently configured to use
+
+    tor_values = conn.get_option(option, [], True)
+
+    # multiline entries can be comma separated values (for both tor and conf)
+
+    value_list = [value]
+
+    if option in get_multiline_parameters():
+      value_list = [val.strip() for val in value.split(",")]
+
+      fetched_values, tor_values = tor_values, []
+      for fetched_value in fetched_values:
+        for fetched_entry in fetched_value.split(","):
+          fetched_entry = fetched_entry.strip()
+
+          if fetched_entry not in tor_values:
+            tor_values.append(fetched_entry)
+
+    for val in value_list:
+      # checks if both the argument and tor's value are empty
+
+      is_blank_match = not val and not tor_values
+
+      if not is_blank_match and val not in tor_values:
+        # converts corrections to reader-friendly size values
+
+        display_values = tor_values
+
+        if value_type == ValueType.SIZE:
+          display_values = [str_tools.get_size_label(int(val)) for val in tor_values]
+        elif value_type == ValueType.TIME:
+          display_values = [str_tools.get_time_label(int(val)) for val in tor_values]
+
+        issues_found.append((line_number, ValidationError.MISMATCH, ", ".join(display_values)))
+
+  # checks if any custom options are missing from the torrc
+
+  for option in custom_options:
+    # In new versions the 'DirReqStatistics' option is true by default and
+    # disabled on startup if geoip lookups are unavailable. If this option is
+    # missing then that's most likely the reason.
+    #
+    # https://trac.torproject.org/projects/tor/ticket/4237
+
+    if option == "DirReqStatistics":
+      continue
+
+    if option not in seen_options:
+      issues_found.append((None, ValidationError.MISSING, option))
+
+  return issues_found
+
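+# Illustrative result (hypothetical torrc): for contents that define
+# "ControlPort" twice and set "SocksPort" to a value differing from what tor
+# reports, validate() could provide something like...
+#
+#   [(5, ValidationError.MISMATCH, "9050"),
+#    (3, ValidationError.DUPLICATE, "ControlPort")]
+#
+# Line numbers index into the contents that were passed in (zero based), and
+# are None for MISSING entries.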
+
+def _parse_conf_value(conf_arg):
+  """
+  Converts size or time values to their lowest units (bytes or seconds) which
+  is what GETCONF calls provide. The result is a tuple of the value and unit
+  type.
+
+  Arguments:
+    conf_arg - torrc argument
+  """
+
+  if conf_arg.count(" ") == 1:
+    val, unit = conf_arg.lower().split(" ", 1)
+
+    if not val.isdigit():
+      return conf_arg, ValueType.UNRECOGNIZED
+
+    mult, mult_type = _get_unit_type(unit)
+
+    if mult is not None:
+      return str(int(val) * mult), mult_type
+
+  return conf_arg, ValueType.UNRECOGNIZED
+
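+# Worked examples (assuming an armrc that maps "gb" into torrc.units.size.gb
+# and "minutes" into torrc.units.time.min, as in the hypothetical fragment
+# near the top of this module):
+#
+#   _parse_conf_value("2 GB")        => ("2147483648", ValueType.SIZE)
+#   _parse_conf_value("30 minutes")  => ("1800", ValueType.TIME)
+#   _parse_conf_value("bridge")      => ("bridge", ValueType.UNRECOGNIZED)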
+
+def _get_unit_type(unit):
+  """
+  Provides the type and multiplier for an argument's unit. The multiplier is
+  None if the unit isn't recognized.
+
+  Arguments:
+    unit - string representation of a unit
+  """
+
+  for label in SIZE_MULT:
+    if unit in CONFIG["torrc.units.size." + label]:
+      return SIZE_MULT[label], ValueType.SIZE
+
+  for label in TIME_MULT:
+    if unit in CONFIG["torrc.units.time." + label]:
+      return TIME_MULT[label], ValueType.TIME
+
+  return None, ValueType.UNRECOGNIZED
+
+
+def _strip_comments(contents):
+  """
+  Removes comments and extra whitespace from the given torrc contents.
+
+  Arguments:
+    contents - torrc contents
+  """
+
+  stripped_contents = []
+
+  for line in contents:
+    if line and "#" in line:
+      line = line[:line.find("#")]
+
+    stripped_contents.append(line.strip())
+
+  return stripped_contents
+
+
+class Torrc():
+  """
+  Wrapper for the torrc. All getters provide None if the contents are unloaded.
+  """
+
+  def __init__(self):
+    self.contents = None
+    self.config_location = None
+    self.vals_lock = threading.RLock()
+
+    # cached results for the current contents
+    self.displayable_contents = None
+    self.stripped_contents = None
+    self.corrections = None
+
+    # flag to indicate if we've given a load failure warning before
+    self.is_load_fail_warned = False
+
+  def load(self, log_failure = False):
+    """
+    Loads or reloads the torrc contents, raising an IOError if there's a
+    problem.
+
+    Arguments:
+      log_failure - if the torrc fails to load and we've never provided a
+                   warning for this before then logs a warning
+    """
+
+    self.vals_lock.acquire()
+
+    # clears contents and caches
+    self.contents, self.config_location = None, None
+    self.displayable_contents = None
+    self.stripped_contents = None
+    self.corrections = None
+
+    try:
+      self.config_location = get_config_location()
+      config_file = open(self.config_location, "r")
+      self.contents = config_file.readlines()
+      config_file.close()
+    except IOError as exc:
+      if log_failure and not self.is_load_fail_warned:
+        log.warn("Unable to load torrc (%s)" % exc.strerror)
+        self.is_load_fail_warned = True
+
+      self.vals_lock.release()
+      raise exc
+
+    self.vals_lock.release()
+
+  def is_loaded(self):
+    """
+    Provides True if contents have been loaded, False otherwise.
+    """
+
+    return self.contents is not None
+
+  def get_config_location(self):
+    """
+    Provides the location of the loaded configuration contents. This may be
+    available even if the torrc failed to load.
+    """
+
+    return self.config_location
+
+  def get_contents(self):
+    """
+    Provides the contents of the configuration file.
+    """
+
+    self.vals_lock.acquire()
+    return_val = list(self.contents) if self.contents else None
+    self.vals_lock.release()
+    return return_val
+
+  def get_display_contents(self, strip = False):
+    """
+    Provides the contents of the configuration file, formatted in a
+    rendering-friendly fashion:
+    - Tabs print as three spaces. Keeping them as tabs is problematic for
+      layouts since it's counted as a single character, but occupies several
+      cells.
+    - Strips control and unprintable characters.
+
+    Arguments:
+      strip - removes comments and extra whitespace if true
+    """
+
+    self.vals_lock.acquire()
+
+    if not self.is_loaded():
+      return_val = None
+    else:
+      if self.displayable_contents is None:
+        # restricts contents to displayable characters
+        self.displayable_contents = []
+
+        for line_number in range(len(self.contents)):
+          line_text = self.contents[line_number]
+          line_text = line_text.replace("\t", "   ")
+          line_text = ui_tools.get_printable(line_text)
+          self.displayable_contents.append(line_text)
+
+      if strip:
+        if self.stripped_contents is None:
+          self.stripped_contents = _strip_comments(self.displayable_contents)
+
+        return_val = list(self.stripped_contents)
+      else:
+        return_val = list(self.displayable_contents)
+
+    self.vals_lock.release()
+    return return_val
+
+  def get_corrections(self):
+    """
+    Performs validation on the loaded contents and provides back the
+    corrections. If validation is disabled then this won't provide any
+    results.
+    """
+
+    self.vals_lock.acquire()
+
+    if not self.is_loaded():
+      return_val = None
+    else:
+      tor_version = tor_tools.get_conn().get_version()
+      skip_validation = not CONFIG["features.torrc.validate"]
+      skip_validation |= (tor_version is None or not tor_version >= stem.version.Requirement.GETINFO_CONFIG_TEXT)
+
+      if skip_validation:
+        log.info("Skipping torrc validation (requires tor 0.2.2.7-alpha)")
+        return_val = {}
+      else:
+        if self.corrections is None:
+          self.corrections = validate(self.contents)
+
+        return_val = list(self.corrections)
+
+    self.vals_lock.release()
+    return return_val
+
+  def get_lock(self):
+    """
+    Provides the lock governing concurrent access to the contents.
+    """
+
+    return self.vals_lock
+
+  def log_validation_issues(self):
+    """
+    Performs validation on the loaded contents, and logs warnings for issues
+    that are found.
+    """
+
+    corrections = self.get_corrections()
+
+    if corrections:
+      duplicate_options, default_options, mismatch_lines, missing_options = [], [], [], []
+
+      for line_number, issue, msg in corrections:
+        if issue == ValidationError.DUPLICATE:
+          duplicate_options.append("%s (line %i)" % (msg, line_number + 1))
+        elif issue == ValidationError.IS_DEFAULT:
+          default_options.append("%s (line %i)" % (msg, line_number + 1))
+        elif issue == ValidationError.MISMATCH:
+          mismatch_lines.append(line_number + 1)
+        elif issue == ValidationError.MISSING:
+          missing_options.append(msg)
+
+      if duplicate_options or default_options:
+        msg = "Unneeded torrc entries found. They've been highlighted in blue on the torrc page."
+
+        if duplicate_options:
+          if len(duplicate_options) > 1:
+            msg += "\n- entries ignored due to having duplicates: "
+          else:
+            msg += "\n- entry ignored due to having a duplicate: "
+
+          duplicate_options.sort()
+          msg += ", ".join(duplicate_options)
+
+        if default_options:
+          if len(default_options) > 1:
+            msg += "\n- entries match their default values: "
+          else:
+            msg += "\n- entry matches its default value: "
+
+          default_options.sort()
+          msg += ", ".join(default_options)
+
+        log.notice(msg)
+
+      if mismatch_lines or missing_options:
+        msg = "The torrc differs from what tor's using. You can issue a sighup to reload the torrc values by pressing x."
+
+        if mismatch_lines:
+          if len(mismatch_lines) > 1:
+            msg += "\n- torrc values differ on lines: "
+          else:
+            msg += "\n- torrc value differs on line: "
+
+          mismatch_lines.sort()
+          msg += ", ".join([str(val + 1) for val in mismatch_lines])
+
+        if missing_options:
+          if len(missing_options) > 1:
+            msg += "\n- configuration values are missing from the torrc: "
+          else:
+            msg += "\n- configuration value is missing from the torrc: "
+
+          missing_options.sort()
+          msg += ", ".join(missing_options)
+
+        log.warn(msg)
+
+
+def _test_config_descriptions():
+  """
+  Tester for the load_option_descriptions function, fetching the man page
+  contents and dumping its parsed results.
+  """
+
+  load_option_descriptions()
+  sorted_options = CONFIG_DESCRIPTIONS.keys()
+  sorted_options.sort()
+
+  for i in range(len(sorted_options)):
+    option = sorted_options[i]
+    man_entry = get_config_description(option)
+    argument, description = man_entry.arg_usage, man_entry.description
+    opt_label = "OPTION: \"%s\"" % option
+    arg_label = "ARGUMENT: \"%s\"" % argument
+
+    print "     %-45s %s" % (opt_label, arg_label)
+    print "\"%s\"" % description
+
+    if i != len(sorted_options) - 1:
+      print "-" * 80
+
+
+def is_root_needed(torrc_path):
+  """
+  Returns True if the given torrc needs root permissions to be run, False
+  otherwise. This raises an IOError if the torrc can't be read.
+
+  Arguments:
+    torrc_path - torrc to be checked
+  """
+
+  try:
+    torrc_file = open(torrc_path, "r")
+    torrc_lines = torrc_file.readlines()
+    torrc_file.close()
+
+    for line in torrc_lines:
+      line = line.strip()
+
+      is_port_opt = False
+
+      for opt in PORT_OPT:
+        if line.startswith(opt):
+          is_port_opt = True
+          break
+
+      if is_port_opt and " " in line:
+        arg = line.split(" ")[1]
+
+        if arg.isdigit() and int(arg) <= 1024 and int(arg) != 0:
+          return True
+
+    return False
+  except Exception as exc:
+    raise IOError(exc)
+
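+# For instance (hypothetical torrc contents): a torrc with "ORPort 443" makes
+# is_root_needed() provide True since binding to ports 1024 or below requires
+# root, whereas "ORPort 9001" does not.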
+
+def render_torrc(template, options, comment_indent = 30):
+  """
+  Uses the given template to generate a nicely formatted torrc with the given
+  options. The templating language this recognizes is a simple one, recognizing
+  the following options:
+    [IF <option>]         # if <option> maps to true or a non-empty string
+    [IF NOT <option>]     # logical inverse
+    [IF <opt1> | <opt2>]  # logical or of the options
+    [ELSE]          # if the prior conditional evaluated to false
+    [END IF]        # ends the control block
+
+    [<option>]      # inputs the option value, omitting the line if it maps
+                    # to a boolean or empty string
+    [NEWLINE]       # empty line, otherwise templating white space is ignored
+
+  Arguments:
+    template      - torrc template lines used to generate the results
+    options       - mapping of keywords to their given values, with values
+                    being booleans or strings (possibly multi-line)
+    comment_indent - minimum column that comments align on
+  """
+
+  results = []
+  template_iter = iter(template)
+  comment_line_format = "%%-%is%%s" % comment_indent
+
+  try:
+    while True:
+      line = template_iter.next().strip()
+
+      if line.startswith("[IF ") and line.endswith("]"):
+        # checks if any of the conditional options are true or a non-empty string
+
+        evaluates_true = False
+
+        for cond in line[4:-1].split("|"):
+          is_inverse = False
+
+          if cond.startswith("NOT "):
+            is_inverse = True
+            cond = cond[4:]
+
+          if is_inverse != bool(options.get(cond.strip())):
+            evaluates_true = True
+            break
+
+        if evaluates_true:
+          continue
+        else:
+          # skips lines until we come to an else or the end of the block
+          depth = 0
+
+          while depth != -1:
+            line = template_iter.next().strip()
+
+            if line.startswith("[IF ") and line.endswith("]"):
+              depth += 1
+            elif line == "[END IF]":
+              depth -= 1
+            elif depth == 0 and line == "[ELSE]":
+              depth -= 1
+      elif line == "[ELSE]":
+        # an else block we aren't using - skip to the end of it
+        depth = 0
+
+        while depth != -1:
+          line = template_iter.next().strip()
+
+          if line.startswith("[IF "):
+            depth += 1
+          elif line == "[END IF]":
+            depth -= 1
+      elif line == "[NEWLINE]":
+        # explicit newline
+        results.append("")
+      elif line.startswith("#"):
+        # comment only
+        results.append(line)
+      elif line.startswith("[") and line.endswith("]"):
+        # completely dynamic entry
+
+        opt_value = options.get(line[1:-1])
+
+        if opt_value:
+          results.append(opt_value)
+      else:
+        # torrc option line
+
+        option, arg, comment = "", "", ""
+        parsed_line = line
+
+        if "#" in parsed_line:
+          parsed_line, comment = parsed_line.split("#", 1)
+          parsed_line = parsed_line.strip()
+          comment = "# %s" % comment.strip()
+
+        # parses the argument from the option
+
+        if " " in parsed_line.strip():
+          option, arg = parsed_line.split(" ", 1)
+          option = option.strip()
+        else:
+          log.info("torrc template option lacks an argument: '%s'" % line)
+          continue
+
+        # inputs dynamic arguments
+
+        if arg.startswith("[") and arg.endswith("]"):
+          arg = options.get(arg[1:-1])
+
+        # skips argument if it's false or an empty string
+
+        if not arg:
+          continue
+
+        torrc_entry = "%s %s" % (option, arg)
+
+        if comment:
+          results.append(comment_line_format % (torrc_entry + " ", comment))
+        else:
+          results.append(torrc_entry)
+  except StopIteration:
+    pass
+
+  return "\n".join(results)
+
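+# Illustrative sketch (hypothetical template and options) of the templating
+# language documented on render_torrc()...
+#
+#   template = ["[IF ControlPort]",
+#               "ControlPort [ControlPort] # control interface",
+#               "[END IF]",
+#               "SocksPort [SocksPort]"]
+#
+#   render_torrc(template, {"ControlPort": "9051", "SocksPort": "9050"})
+#
+# ... provides roughly (comments padded out to column thirty):
+#
+#   ControlPort 9051              # control interface
+#   SocksPort 9050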
+
+def load_configuration_descriptions(path_prefix):
+  """
+  Attempts to load descriptions for tor's configuration options, fetching them
+  from the man page and persisting them to a file to speed future startups.
+  """
+
+  # It is important that this is loaded before entering the curses context,
+  # otherwise the man call pegs the cpu for around a minute (I'm not sure
+  # why... curses must mess with the terminal in a way that matters to man).
+
+  if CONFIG["features.config.descriptions.enabled"]:
+    is_config_descriptions_loaded = False
+
+    # determines the path where cached descriptions should be persisted (left
+    # undefined if caching is disabled)
+
+    descriptor_path = None
+
+    if CONFIG["features.config.descriptions.persist"]:
+      data_dir = CONFIG["startup.data_directory"]
+
+      if not data_dir.endswith("/"):
+        data_dir += "/"
+
+      descriptor_path = os.path.expanduser(data_dir + "cache/") + CONFIG_DESC_FILENAME
+
+    # attempts to load configuration descriptions cached in the data directory
+
+    if descriptor_path:
+      try:
+        load_start_time = time.time()
+        load_option_descriptions(descriptor_path)
+        is_config_descriptions_loaded = True
+
+        log.info(DESC_LOAD_SUCCESS_MSG % (descriptor_path, time.time() - load_start_time))
+      except IOError as exc:
+        log.info(DESC_LOAD_FAILED_MSG % exc.strerror)
+
+    # fetches configuration options from the man page
+
+    if not is_config_descriptions_loaded:
+      try:
+        load_start_time = time.time()
+        load_option_descriptions()
+        is_config_descriptions_loaded = True
+
+        log.info(DESC_READ_MAN_SUCCESS_MSG % (time.time() - load_start_time))
+      except IOError as exc:
+        log.notice(DESC_READ_MAN_FAILED_MSG % exc.strerror)
+
+      # persists configuration descriptions
+
+      if is_config_descriptions_loaded and descriptor_path:
+        try:
+          load_start_time = time.time()
+          save_option_descriptions(descriptor_path)
+          log.info(DESC_SAVE_SUCCESS_MSG % (descriptor_path, time.time() - load_start_time))
+        except IOError as exc:
+          log.notice(DESC_SAVE_FAILED_MSG % exc.strerror)
+        except OSError as exc:
+          log.notice(DESC_SAVE_FAILED_MSG % exc)
+
+    # finally fall back to the cached descriptors provided with arm (this is
+    # often the case for tbb and manual builds)
+
+    if not is_config_descriptions_loaded:
+      try:
+        load_start_time = time.time()
+        loaded_version = load_option_descriptions("%sresources/%s" % (path_prefix, CONFIG_DESC_FILENAME), False)
+        is_config_descriptions_loaded = True
+        log.notice(DESC_INTERNAL_LOAD_SUCCESS_MSG % loaded_version)
+      except IOError as exc:
+        log.error(DESC_INTERNAL_LOAD_FAILED_MSG % exc.strerror)
diff --git a/arm/util/tor_tools.py b/arm/util/tor_tools.py
new file mode 100644
index 0000000..a7945fd
--- /dev/null
+++ b/arm/util/tor_tools.py
@@ -0,0 +1,1039 @@
+"""
+Helper for working with an active tor process. This both provides a wrapper for
+accessing stem and notifications of state changes to subscribers.
+"""
+
+import math
+import os
+import threading
+import time
+
+import stem
+import stem.control
+
+from stem.util import log, proc, system
+
+CONTROLLER = None  # singleton Controller instance
+
+UNDEFINED = "<Undefined_ >"
+
+
+def get_conn():
+  """
+  Singleton constructor for a Controller. Be aware that this starts as being
+  uninitialized, needing a stem Controller before it's fully functional.
+  """
+
+  global CONTROLLER
+
+  if CONTROLLER is None:
+    CONTROLLER = Controller()
+
+  return CONTROLLER
+
+
+class Controller:
+  """
+  Stem wrapper providing convenience functions (mostly from the days of using
+  TorCtl), listener functionality for tor's state, and the capability for
+  controller connections to be restarted if closed.
+  """
+
+  def __init__(self):
+    self.controller = None
+    self.conn_lock = threading.RLock()
+    self._fingerprint_mappings = None     # mappings of ip -> [(port, fingerprint), ...]
+    self._fingerprint_lookup_cache = {}   # lookup cache with (ip, port) -> fingerprint mappings
+    self._nickname_lookup_cache = {}      # lookup cache with fingerprint -> nickname mappings
+    self._address_lookup_cache = {}       # lookup cache with fingerprint -> (ip address, or port) mappings
+    self._consensus_lookup_cache = {}     # lookup cache with network status entries
+    self._descriptor_lookup_cache = {}    # lookup cache with relay descriptors
+    self._last_newnym = 0                 # time we last sent a NEWNYM signal
+
+  def init(self, controller):
+    """
+    Uses the given stem instance for future operations, notifying listeners
+    about the change.
+
+    Arguments:
+      controller - stem based Controller instance
+    """
+
+    # TODO: We should reuse our controller instance so event listeners will be
+    # re-attached. This is a point of regression until we do... :(
+
+    if controller.is_alive() and controller != self.controller:
+      self.conn_lock.acquire()
+
+      if self.controller:
+        self.close()  # shut down current connection
+
+      self.controller = controller
+      log.info("Stem connected to tor version %s" % self.controller.get_version())
+
+      self.controller.add_event_listener(self.ns_event, stem.control.EventType.NS)
+      self.controller.add_event_listener(self.new_consensus_event, stem.control.EventType.NEWCONSENSUS)
+      self.controller.add_event_listener(self.new_desc_event, stem.control.EventType.NEWDESC)
+
+      # reset caches for ip -> fingerprint lookups
+
+      self._fingerprint_mappings = None
+      self._fingerprint_lookup_cache = {}
+      self._nickname_lookup_cache = {}
+      self._address_lookup_cache = {}
+      self._consensus_lookup_cache = {}
+      self._descriptor_lookup_cache = {}
+
+      # time that we sent our last newnym signal
+
+      self._last_newnym = 0
+
+      self.conn_lock.release()
+
+  def close(self):
+    """
+    Closes the current stem instance and notifies listeners.
+    """
+
+    self.conn_lock.acquire()
+
+    if self.controller:
+      self.controller.close()
+
+    self.conn_lock.release()
+
+  def get_controller(self):
+    return self.controller
+
+  def is_alive(self):
+    """
+    Returns True if this has been initialized with a working stem instance,
+    False otherwise.
+    """
+
+    self.conn_lock.acquire()
+
+    result = False
+
+    if self.controller:
+      if self.controller.is_alive():
+        result = True
+      else:
+        self.close()
+
+    self.conn_lock.release()
+    return result
+
+  def get_info(self, param, default = UNDEFINED):
+    """
+    Queries the control port for the given GETINFO option, providing the
+    default if the response is undefined or fails for any reason (error
+    response, control port closed, uninitiated, etc).
+
+    Arguments:
+      param   - GETINFO option to be queried
+      default - result if the query fails
+    """
+
+    self.conn_lock.acquire()
+
+    try:
+      if not self.is_alive():
+        if default != UNDEFINED:
+          return default
+        else:
+          raise stem.SocketClosed()
+
+      if default != UNDEFINED:
+        return self.controller.get_info(param, default)
+      else:
+        return self.controller.get_info(param)
+    except stem.SocketClosed as exc:
+      self.close()
+      raise exc
+    finally:
+      self.conn_lock.release()
+
+  def get_option(self, param, default = UNDEFINED, multiple = False):
+    """
+    Queries the control port for the given configuration option, providing the
+    default if the response is undefined or fails for any reason. If multiple
+    values exist then this arbitrarily returns the first unless the multiple
+    flag is set.
+
+    Arguments:
+      param     - configuration option to be queried
+      default   - result if the query fails
+      multiple  - provides a list with all returned values if true, otherwise
+                  this just provides the first result
+    """
+
+    self.conn_lock.acquire()
+
+    try:
+      if not self.is_alive():
+        if default != UNDEFINED:
+          return default
+        else:
+          raise stem.SocketClosed()
+
+      if default != UNDEFINED:
+        return self.controller.get_conf(param, default, multiple)
+      else:
+        return self.controller.get_conf(param, multiple = multiple)
+    except stem.SocketClosed as exc:
+      self.close()
+      raise exc
+    finally:
+      self.conn_lock.release()
+
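+  # Illustrative usage (assumes an arm Controller that's been init()'ed with a
+  # stem Controller): providing a default makes these calls safe even if the
+  # control connection is unavailable...
+  #
+  #   conn = get_conn()
+  #   version = conn.get_info("version", "unknown")
+  #   socks_ports = conn.get_option("SocksPort", [], multiple = True)
+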
+  def set_option(self, param, value = None):
+    """
+    Issues a SETCONF to set the given option/value pair. An exception is
+    raised if it fails to be set. If no value is provided then this sets the
+    option to 0 or NULL.
+
+    Arguments:
+      param - configuration option to be set
+      value - value to set the parameter to (this can be either a string or a
+              list of strings)
+    """
+
+    self.conn_lock.acquire()
+
+    try:
+      if not self.is_alive():
+        raise stem.SocketClosed()
+
+      self.controller.set_conf(param, value)
+    except stem.SocketClosed as exc:
+      self.close()
+      raise exc
+    finally:
+      self.conn_lock.release()
+
+  def save_conf(self):
+    """
+    Calls tor's SAVECONF method.
+    """
+
+    self.conn_lock.acquire()
+
+    if self.is_alive():
+      self.controller.save_conf()
+
+    self.conn_lock.release()
+
+  def send_newnym(self):
+    """
+    Sends a newnym request to Tor. These are rate limited so if it occurs
+    more than once within a ten second window then the second is delayed.
+    """
+
+    self.conn_lock.acquire()
+
+    if self.is_alive():
+      self._last_newnym = time.time()
+      self.controller.signal(stem.Signal.NEWNYM)
+
+    self.conn_lock.release()
+
+  def is_newnym_available(self):
+    """
+    True if Tor will immediately respect a newnym request, false otherwise.
+    """
+
+    if self.is_alive():
+      return self.get_newnym_wait() == 0
+    else:
+      return False
+
+  def get_newnym_wait(self):
+    """
+    Provides the number of seconds until a newnym signal would be respected.
+    """
+
+    # newnym signals can occur at the rate of one every ten seconds
+    # TODO: this can't take other controllers into account :(
+
+    return max(0, math.ceil(self._last_newnym + 10 - time.time()))
+
+  def get_circuits(self, default = []):
+    """
+    This provides a list with tuples of the form:
+    (circuit_id, status, purpose, (fingerprint1, fingerprint2...))
+
+    Arguments:
+      default - value provided back if unable to query the circuit-status
+    """
+
+    # TODO: We're losing caching around this. We should check to see the call
+    # volume of this and probably add it to stem.
+
+    results = []
+
+    for entry in self.controller.get_circuits():
+      fingerprints = []
+
+      for fp, nickname in entry.path:
+        if not fp:
+          consensus_entry = self.controller.get_network_status(nickname, None)
+
+          if consensus_entry:
+            fp = consensus_entry.fingerprint
+
+          # It shouldn't be possible for this lookup to fail, but we
+          # need to fill something (callers won't expect our own client
+          # paths to have unknown relays). If this turns out to be wrong
+          # then log a warning.
+
+          if not fp:
+            log.warn("Unable to determine the fingerprint for a relay in our own circuit: %s" % nickname)
+            fp = "0" * 40
+
+        fingerprints.append(fp)
+
+      results.append((int(entry.id), entry.status, entry.purpose, fingerprints))
+
+    if results:
+      return results
+    else:
+      return default
+
+  def get_hidden_service_ports(self, default = []):
+    """
+    Provides the target ports hidden services are configured to use.
+
+    Arguments:
+      default - value provided back if unable to query the hidden service ports
+    """
+
+    result = []
+    hs_options = self.controller.get_conf_map("HiddenServiceOptions", {})
+
+    for entry in hs_options.get("HiddenServicePort", []):
+      # HiddenServicePort entries are of the form...
+      #
+      #   VIRTPORT [TARGET]
+      #
+      # ... with the TARGET being an address, port, or address:port. If the
+      # target port isn't defined then uses the VIRTPORT.
+
+      hs_port = None
+
+      if ' ' in entry:
+        virtport, target = entry.split(' ', 1)
+
+        if ':' in target:
+          hs_port = target.split(':', 1)[1]  # target is an address:port
+        elif target.isdigit():
+          hs_port = target  # target is a port
+        else:
+          hs_port = virtport  # target is an address
+      else:
+        hs_port = entry  # just has the virtual port
+
+      if hs_port.isdigit():
+        result.append(hs_port)
+
+    if result:
+      return result
+    else:
+      return default
+
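+  # For example (hypothetical configuration): a torrc containing
+  # "HiddenServicePort 80 127.0.0.1:8080" makes this provide ["8080"], while a
+  # bare "HiddenServicePort 80" provides ["80"].
+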
+  def get_my_bandwidth_rate(self, default = None):
+    """
+    Provides the effective relaying bandwidth rate of this relay. Currently
+    this doesn't account for SETCONF events.
+
+    Arguments:
+      default - result if the query fails
+    """
+
+    # effective relayed bandwidth is the minimum of BandwidthRate,
+    # MaxAdvertisedBandwidth, and RelayBandwidthRate (if set)
+
+    effective_rate = int(self.get_option("BandwidthRate", None))
+
+    relay_rate = self.get_option("RelayBandwidthRate", None)
+
+    if relay_rate and relay_rate != "0":
+      effective_rate = min(effective_rate, int(relay_rate))
+
+    max_advertised = self.get_option("MaxAdvertisedBandwidth", None)
+
+    if max_advertised:
+      effective_rate = min(effective_rate, int(max_advertised))
+
+    if effective_rate is not None:
+      return effective_rate
+    else:
+      return default
+
+  def get_my_bandwidth_burst(self, default = None):
+    """
+    Provides the effective bandwidth burst rate of this relay. Currently this
+    doesn't account for SETCONF events.
+
+    Arguments:
+      default - result if the query fails
+    """
+
+    # effective burst (same for BandwidthBurst and RelayBandwidthBurst)
+    effective_burst = int(self.get_option("BandwidthBurst", None))
+
+    relay_burst = self.get_option("RelayBandwidthBurst", None)
+
+    if relay_burst and relay_burst != "0":
+      effective_burst = min(effective_burst, int(relay_burst))
+
+    if effective_burst is not None:
+      return effective_burst
+    else:
+      return default
+
+  def get_my_bandwidth_observed(self, default = None):
+    """
+    Provides the relay's current observed bandwidth (the throughput determined
+    from historical measurements on the client side). This is used in the path
+    selection heuristic if the measured bandwidth is undefined.
+    This is fetched from the descriptors and hence will get stale if
+    descriptors aren't periodically updated.
+
+    Arguments:
+      default - result if the query fails
+    """
+
+    my_fingerprint = self.get_info("fingerprint", None)
+
+    if my_fingerprint:
+      my_descriptor = self.controller.get_server_descriptor(my_fingerprint)
+
+      if my_descriptor:
+        return my_descriptor.observed_bandwidth
+
+    return default
+
+  def get_my_bandwidth_measured(self, default = None):
+    """
+    Provides the relay's current measured bandwidth (the throughput as noted by
+    the directory authorities and used by clients for relay selection). This is
+    undefined if not in the consensus or with older versions of Tor. Depending
+    on the circumstances this can be from a variety of things (observed,
+    measured, weighted measured, etc) as described by:
+    https://trac.torproject.org/projects/tor/ticket/1566
+
+    Arguments:
+      default - result if the query fails
+    """
+
+    # TODO: Tor is documented as providing v2 router status entries but
+    # actually looks to be v3. This needs to be sorted out between stem
+    # and tor.
+
+    my_fingerprint = self.get_info("fingerprint", None)
+
+    if my_fingerprint:
+      my_status_entry = self.controller.get_network_status(my_fingerprint)
+
+      if my_status_entry and hasattr(my_status_entry, 'bandwidth'):
+        return my_status_entry.bandwidth
+
+    return default
+
+  def get_my_flags(self, default = None):
+    """
+    Provides the flags held by this relay.
+
+    Arguments:
+      default - result if the query fails or this relay isn't a part of the consensus yet
+    """
+
+    my_fingerprint = self.get_info("fingerprint", None)
+
+    if my_fingerprint:
+      my_status_entry = self.controller.get_network_status(my_fingerprint)
+
+      if my_status_entry:
+        return my_status_entry.flags
+
+    return default
+
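The flag lookup above goes through stem's router status entries. A minimal sketch of doing the same directly, assuming a controller listening on the default 9051 port:

  from stem.control import Controller

  with Controller.from_port(port = 9051) as controller:
    controller.authenticate()

    fingerprint = controller.get_info("fingerprint", None)

    if fingerprint:
      status_entry = controller.get_network_status(fingerprint, None)

      if status_entry:
        flags = status_entry.flags  # e.g. ['Fast', 'Running', 'Stable', 'Valid']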
+  def get_version(self):
+    """
+    Provides the version of our tor instance. This is None if we don't have a
+    connection.
+    """
+
+    self.conn_lock.acquire()
+
+    try:
+      return self.controller.get_version()
+    except stem.SocketClosed:
+      self.close()
+      return None
+    except:
+      return None
+    finally:
+      self.conn_lock.release()
+
+  def is_geoip_unavailable(self):
+    """
+    Provides true if we've concluded that our geoip database is unavailable,
+    false otherwise.
+    """
+
+    if self.is_alive():
+      return self.controller.is_geoip_unavailable()
+    else:
+      return False
+
+  def get_my_user(self):
+    """
+    Provides the user this process is running under. If unavailable this
+    provides None.
+    """
+
+    return self.controller.get_user(None)
+
+  def get_my_file_descriptor_usage(self):
+    """
+    Provides the number of file descriptors currently being used by this
+    process. This returns None if this can't be determined.
+    """
+
+    # The file descriptor usage is the size of the '/proc/<pid>/fd' contents
+    # http://linuxshellaccount.blogspot.com/2008/06/finding-number-of-open-file-descriptors.html
+    # I'm not sure about other platforms (like BSD) so this is unsupported there.
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive() and proc.is_available():
+      my_pid = self.controller.get_pid(None)
+
+      if my_pid:
+        try:
+          result = len(os.listdir("/proc/%s/fd" % my_pid))
+        except:
+          pass
+
+    self.conn_lock.release()
+
+    return result
+
+  def get_my_file_descriptor_limit(self):
+    """
+    Provides the maximum number of file descriptors this process can have.
+    Only the Tor process itself reliably knows this value, and the option for
+    getting this was added in Tor 0.2.3.x-final. If that's unavailable then
+    we can only estimate the file descriptor limit based on other factors.
+
+    The return result is a tuple of the form:
+    (file_desc_limit, is_estimate)
+    and if all methods fail then both values are None.
+    """
+
+    # provides -1 if the query fails
+    queried_limit = self.get_info("process/descriptor-limit", None)
+
+    if queried_limit is not None and queried_limit != "-1":
+      return (int(queried_limit), False)
+
+    tor_user = self.get_my_user()
+
+    # This is guessing the open file limit. Unfortunately there's no way
+    # (other than "/usr/proc/bin/pfiles pid | grep rlimit" under Solaris)
+    # to get the file descriptor limit for an arbitrary process.
+
+    if tor_user == "debian-tor":
+      # probably loaded via /etc/init.d/tor which changes descriptor limit
+      return (8192, True)
+    else:
+      # uses ulimit to estimate (-H is for hard limit, which is what tor uses)
+      ulimit_results = system.call("ulimit -Hn")
+
+      if ulimit_results:
+        ulimit = ulimit_results[0].strip()
+
+        if ulimit.isdigit():
+          return (int(ulimit), True)
+
+    return (None, None)
+
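When 'GETINFO process/descriptor-limit' is unavailable the limit is only estimated. A rough sketch of a similar estimate with the standard resource module (this reports arm's own hard limit rather than tor's, so it's an approximation at best):

  import resource

  def estimated_descriptor_limit():
    # hard limit on open file descriptors for this process, None if unknown
    try:
      soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
      return hard_limit
    except (ValueError, resource.error):
      return None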
+  def get_start_time(self):
+    """
+    Provides the unix time for when the tor process first started. If this
+    can't be determined then this provides None.
+    """
+
+    try:
+      return system.get_start_time(self.controller.get_pid())
+    except:
+      return None
+
+  def is_exiting_allowed(self, ip_address, port):
+    """
+    Checks if the given destination can be exited to by this relay, returning
+    True if so and False otherwise.
+    """
+
+    self.conn_lock.acquire()
+
+    result = False
+
+    if self.is_alive():
+      # If we allow any exiting then this could be relayed DNS queries,
+      # otherwise the policy is checked. Tor still makes DNS connections to
+      # test when exiting isn't allowed, but nothing is relayed over them.
+      # I'm registering these as non-exiting to avoid likely user confusion:
+      # https://trac.torproject.org/projects/tor/ticket/965
+
+      our_policy = self.get_exit_policy()
+
+      if our_policy and our_policy.is_exiting_allowed() and port == "53":
+        result = True
+      else:
+        result = our_policy and our_policy.can_exit_to(ip_address, port)
+
+    self.conn_lock.release()
+
+    return result
+
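The check above leans on stem's ExitPolicy, which can also be exercised on its own. For instance:

  from stem.exit_policy import ExitPolicy

  policy = ExitPolicy("accept *:80", "accept *:443", "reject *:*")

  policy.is_exiting_allowed()                # True, some exiting is permitted
  policy.can_exit_to("208.113.165.162", 80)  # True
  policy.can_exit_to("208.113.165.162", 22)  # False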
+  def get_exit_policy(self):
+    """
+    Provides an ExitPolicy instance for the head of this relay's exit policy
+    chain. If there's no active connection then this provides None.
+    """
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive():
+      try:
+        result = self.controller.get_exit_policy(None)
+      except:
+        pass
+
+    self.conn_lock.release()
+
+    return result
+
+  def get_consensus_entry(self, relay_fingerprint):
+    """
+    Provides the most recently available consensus information for the given
+    relay. This is None if no such information exists.
+
+    Arguments:
+      relay_fingerprint - fingerprint of the relay
+    """
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive():
+      if relay_fingerprint not in self._consensus_lookup_cache:
+        ns_entry = self.get_info("ns/id/%s" % relay_fingerprint, None)
+        self._consensus_lookup_cache[relay_fingerprint] = ns_entry
+
+      result = self._consensus_lookup_cache[relay_fingerprint]
+
+    self.conn_lock.release()
+
+    return result
+
+  def get_descriptor_entry(self, relay_fingerprint):
+    """
+    Provides the most recently available descriptor information for the given
+    relay. Unless FetchUselessDescriptors is set this may frequently be
+    unavailable. If no such descriptor is available then this returns None.
+
+    Arguments:
+      relay_fingerprint - fingerprint of the relay
+    """
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive():
+      if relay_fingerprint not in self._descriptor_lookup_cache:
+        desc_entry = self.get_info("desc/id/%s" % relay_fingerprint, None)
+        self._descriptor_lookup_cache[relay_fingerprint] = desc_entry
+
+      result = self._descriptor_lookup_cache[relay_fingerprint]
+
+    self.conn_lock.release()
+
+    return result
+
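Both of the cached lookups above boil down to GETINFO queries, which stem exposes through get_info(). For example, fetching our own entries, assuming a control port on 9051:

  from stem.control import Controller

  with Controller.from_port(port = 9051) as controller:
    controller.authenticate()

    fingerprint = controller.get_info("fingerprint", None)

    if fingerprint:
      consensus_entry = controller.get_info("ns/id/%s" % fingerprint, None)
      descriptor_entry = controller.get_info("desc/id/%s" % fingerprint, None)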
+  def get_relay_fingerprint(self, relay_address, relay_port = None, get_all_matches = False):
+    """
+    Provides the fingerprint associated with the given address. If there are
+    multiple potential matches or the mapping is unknown then this returns
+    None. When several relays share an IP address this disambiguates between
+    them by a few methods, one of them being to pick relays we have a
+    connection with.
+
+    Arguments:
+      relay_address   - address of relay to be returned
+      relay_port      - orport of relay (to further narrow the results)
+      get_all_matches - ignores the relay_port and provides all of the
+                        (port, fingerprint) tuples matching the given
+                        address
+    """
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive():
+      if get_all_matches:
+        # populates the ip -> fingerprint mappings if not yet available
+        if self._fingerprint_mappings is None:
+          self._fingerprint_mappings = self._get_fingerprint_mappings()
+
+        if relay_address in self._fingerprint_mappings:
+          result = self._fingerprint_mappings[relay_address]
+        else:
+          result = []
+      else:
+        # query the fingerprint if it isn't yet cached
+        if (relay_address, relay_port) not in self._fingerprint_lookup_cache:
+          relay_fingerprint = self._get_relay_fingerprint(relay_address, relay_port)
+          self._fingerprint_lookup_cache[(relay_address, relay_port)] = relay_fingerprint
+
+        result = self._fingerprint_lookup_cache[(relay_address, relay_port)]
+
+    self.conn_lock.release()
+
+    return result
+
+  def get_relay_nickname(self, relay_fingerprint):
+    """
+    Provides the nickname associated with the given relay. This provides None
+    if no such relay exists, and "Unnamed" if the name hasn't been set.
+
+    Arguments:
+      relay_fingerprint - fingerprint of the relay
+    """
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive():
+      # query the nickname if it isn't yet cached
+      if relay_fingerprint not in self._nickname_lookup_cache:
+        if relay_fingerprint == self.get_info("fingerprint", None):
+          # this is us, simply check the config
+          my_nickname = self.get_option("Nickname", "Unnamed")
+          self._nickname_lookup_cache[relay_fingerprint] = my_nickname
+        else:
+          ns_entry = self.controller.get_network_status(relay_fingerprint, None)
+
+          if ns_entry:
+            self._nickname_lookup_cache[relay_fingerprint] = ns_entry.nickname
+
+      # relays that aren't in the consensus won't have a cached entry
+      result = self._nickname_lookup_cache.get(relay_fingerprint)
+
+    self.conn_lock.release()
+
+    return result
+
+  def get_relay_exit_policy(self, relay_fingerprint):
+    """
+    Provides the ExitPolicy instance associated with the given relay. The tor
+    consensus entries only carry a policy summary, which doesn't indicate if
+    private addresses are rejected or include address-specific rules, so this
+    relies on the relay's descriptor instead and returns None if a recent one
+    is unavailable or the policy can't otherwise be determined.
+
+    Arguments:
+      relay_fingerprint - fingerprint of the relay
+    """
+
+    self.conn_lock.acquire()
+
+    result = None
+
+    if self.is_alive():
+      # attempts to fetch the policy via the descriptor
+      descriptor = self.controller.get_server_descriptor(relay_fingerprint, None)
+
+      if descriptor:
+        result = descriptor.exit_policy
+
+    self.conn_lock.release()
+
+    return result
+
+  def get_relay_address(self, relay_fingerprint, default = None):
+    """
+    Provides the (IP address, ORPort) tuple for a given relay. If the lookup
+    fails then this returns the default.
+
+    Arguments:
+      relay_fingerprint - fingerprint of the relay
+      default           - result if the lookup fails
+    """
+
+    self.conn_lock.acquire()
+
+    result = default
+
+    if self.is_alive():
+      # query the address if it isn't yet cached
+      if relay_fingerprint not in self._address_lookup_cache:
+        if relay_fingerprint == self.get_info("fingerprint", None):
+          # this is us, simply check the config
+          my_address = self.get_info("address", None)
+          my_or_port = self.get_option("ORPort", None)
+
+          if my_address and my_or_port:
+            self._address_lookup_cache[relay_fingerprint] = (my_address, my_or_port)
+        else:
+          # check the consensus for the relay
+          ns_entry = self.get_consensus_entry(relay_fingerprint)
+
+          if ns_entry:
+            ns_line_comp = ns_entry.split("\n")[0].split(" ")
+
+            if len(ns_line_comp) >= 8:
+              self._address_lookup_cache[relay_fingerprint] = (ns_line_comp[6], ns_line_comp[7])
+
+      result = self._address_lookup_cache.get(relay_fingerprint, default)
+
+    self.conn_lock.release()
+
+    return result
+
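The consensus entry parsed above starts with the 'r' line, whose seventh and eighth space separated fields are the relay's IP address and ORPort. A small sketch against a made up entry:

  # illustrative "r" line as it appears at the start of an ns/id/* reply
  ns_entry = "r caerSidi p1aag7VwarGxqctS7/fS0y5FU+s oQZFLYe9e4A7bOkWKR7TaNxb0JE 2012-08-06 11:19:31 71.35.150.29 9001 0"

  ns_line_comp = ns_entry.split("\n")[0].split(" ")

  if len(ns_line_comp) >= 8:
    address, or_port = ns_line_comp[6], ns_line_comp[7]  # ('71.35.150.29', '9001')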
+  def add_event_listener(self, listener, *event_types):
+    """
+    Directs further tor controller events to callback functions of the
+    listener. If a new control connection is initialized then this listener is
+    reattached.
+    """
+
+    self.conn_lock.acquire()
+
+    if self.is_alive():
+      self.controller.add_event_listener(listener, *event_types)
+
+    self.conn_lock.release()
+
+  def remove_event_listener(self, listener):
+    """
+    Stops the given event listener from being notified of further events.
+    """
+
+    self.conn_lock.acquire()
+
+    if self.is_alive():
+      self.controller.remove_event_listener(listener)
+
+    self.conn_lock.release()
+
+  def add_status_listener(self, callback):
+    """
+    Directs further events related to tor's controller status to the callback
+    function.
+
+    Arguments:
+      callback - functor that'll accept the events, expected to be of the form:
+                 my_function(controller, event_type)
+    """
+
+    self.controller.add_status_listener(callback)
+
+  def reload(self):
+    """
+    This resets tor (sending a RELOAD signal to the control port) causing tor's
+    internal state to be reset and the torrc reloaded.
+    """
+
+    self.conn_lock.acquire()
+
+    try:
+      if self.is_alive():
+        try:
+          self.controller.signal(stem.Signal.RELOAD)
+        except Exception as exc:
+          # new torrc parameters caused an error (tor's likely shut down)
+          raise IOError(str(exc))
+    finally:
+      self.conn_lock.release()
+
+  def shutdown(self, force = False):
+    """
+    Sends a shutdown signal to the attached tor instance. For relays the
+    actual shutdown is delayed for thirty seconds unless the force flag is
+    given. This raises an IOError if a signal is sent but fails.
+
+    Arguments:
+      force - triggers an immediate shutdown for relays if True
+    """
+
+    self.conn_lock.acquire()
+
+    raised_exception = None
+
+    if self.is_alive():
+      try:
+        is_relay = self.get_option("ORPort", None) is not None
+
+        if force:
+          self.controller.signal(stem.Signal.HALT)
+        else:
+          self.controller.signal(stem.Signal.SHUTDOWN)
+
+        # shuts down control connection if we aren't making a delayed shutdown
+
+        if force or not is_relay:
+          self.close()
+      except Exception as exc:
+        raised_exception = IOError(str(exc))
+
+    self.conn_lock.release()
+
+    if raised_exception:
+      raise raised_exception
+
+  def ns_event(self, event):
+    self._consensus_lookup_cache = {}
+
+  def new_consensus_event(self, event):
+    self.conn_lock.acquire()
+
+    # reconstructs consensus based mappings
+
+    self._fingerprint_lookup_cache = {}
+    self._nickname_lookup_cache = {}
+    self._address_lookup_cache = {}
+    self._consensus_lookup_cache = {}
+
+    if self._fingerprint_mappings is not None:
+      self._fingerprint_mappings = self._get_fingerprint_mappings(event.desc)
+
+    self.conn_lock.release()
+
+  def new_desc_event(self, event):
+    self.conn_lock.acquire()
+
+    desc_fingerprints = [fingerprint for (fingerprint, nickname) in event.relays]
+
+    # If we're tracking ip address -> fingerprint mappings then update with
+    # the new relays.
+
+    self._fingerprint_lookup_cache = {}
+    self._descriptor_lookup_cache = {}
+
+    if self._fingerprint_mappings is not None:
+      for fingerprint in desc_fingerprints:
+        # gets consensus data for the new descriptor
+
+        try:
+          desc = self.controller.get_network_status(fingerprint)
+        except stem.ControllerError:
+          continue
+
+        # updates _fingerprint_mappings with new data
+
+        if desc.address in self._fingerprint_mappings:
+          # if entry already exists with the same orport, remove it
+
+          orport_match = None
+
+          for entry_port, entry_fingerprint in self._fingerprint_mappings[desc.address]:
+            if entry_port == desc.or_port:
+              orport_match = (entry_port, entry_fingerprint)
+              break
+
+          if orport_match:
+            self._fingerprint_mappings[desc.address].remove(orport_match)
+
+          # add the new entry
+
+          self._fingerprint_mappings[desc.address].append((desc.or_port, desc.fingerprint))
+        else:
+          self._fingerprint_mappings[desc.address] = [(desc.or_port, desc.fingerprint)]
+
+    self.conn_lock.release()
+
+  def _get_fingerprint_mappings(self, descriptors = None):
+    """
+    Provides IP address to (port, fingerprint) tuple mappings for all of the
+    currently cached relays.
+
+    Arguments:
+      descriptors - router status entries (fetched if not provided)
+    """
+
+    results = {}
+
+    if self.is_alive():
+      # fetch the current network status if not provided
+
+      if not descriptors:
+        try:
+          descriptors = self.controller.get_network_statuses()
+        except stem.ControllerError:
+          descriptors = []
+
+      # construct mappings of ips to relay data
+
+      for desc in descriptors:
+        results.setdefault(desc.address, []).append((desc.or_port, desc.fingerprint))
+
+    return results
+
+  def _get_relay_fingerprint(self, relay_address, relay_port):
+    """
+    Provides the fingerprint associated with the address/port combination.
+
+    Arguments:
+      relay_address - address of relay to be returned
+      relay_port    - orport of relay (to further narrow the results)
+    """
+
+    # If we were provided with a string port then convert to an int (so
+    # lookups won't mismatch based on type).
+
+    if isinstance(relay_port, str):
+      relay_port = int(relay_port)
+
+    # checks if this matches us
+
+    if relay_address == self.get_info("address", None):
+      if not relay_port or relay_port == self.get_option("ORPort", None):
+        return self.get_info("fingerprint", None)
+
+    # if we haven't yet populated the ip -> fingerprint mappings then do so
+
+    if self._fingerprint_mappings is None:
+      self._fingerprint_mappings = self._get_fingerprint_mappings()
+
+    potential_matches = self._fingerprint_mappings.get(relay_address)
+
+    if not potential_matches:
+      return None  # no relay matches this ip address
+
+    if len(potential_matches) == 1:
+      # There's only one relay belonging to this ip address. If the port
+      # matches then we're done.
+
+      match = potential_matches[0]
+
+      if relay_port and match[0] != relay_port:
+        return None
+      else:
+        return match[1]
+    elif relay_port:
+      # Multiple potential matches, so trying to match based on the port.
+      for entry_port, entry_fingerprint in potential_matches:
+        if entry_port == relay_port:
+          return entry_fingerprint
+
+    return None
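The ns_event, new_consensus_event, and new_desc_event handlers above exist so these caches get dropped when tor announces fresh directory information. A minimal sketch of subscribing to NEWDESC events directly with stem, assuming a control port on 9051:

  import time

  from stem.control import Controller, EventType

  def handle_new_desc(event):
    # NEWDESC events list (fingerprint, nickname) pairs for refreshed relays,
    # the point at which per-relay caches should be invalidated
    for fingerprint, nickname in event.relays:
      pass

  with Controller.from_port(port = 9051) as controller:
    controller.authenticate()
    controller.add_event_listener(handle_new_desc, EventType.NEWDESC)
    time.sleep(60)  # listen for a minute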
diff --git a/arm/util/uiTools.py b/arm/util/uiTools.py
deleted file mode 100644
index 7a1a601..0000000
--- a/arm/util/uiTools.py
+++ /dev/null
@@ -1,628 +0,0 @@
-"""
-Toolkit for common ui tasks when working with curses. This provides a quick and
-easy method of providing the following interface components:
-- preinitialized curses color attributes
-- unit conversion for labels
-"""
-
-import sys
-import curses
-
-from curses.ascii import isprint
-
-from stem.util import conf, enum, log, system
-
-# colors curses can handle
-COLOR_LIST = {
-  "red": curses.COLOR_RED,
-  "green": curses.COLOR_GREEN,
-  "yellow": curses.COLOR_YELLOW,
-  "blue": curses.COLOR_BLUE,
-  "cyan": curses.COLOR_CYAN,
-  "magenta": curses.COLOR_MAGENTA,
-  "black": curses.COLOR_BLACK,
-  "white": curses.COLOR_WHITE,
-}
-
-# boolean for whether color support is enabled, None if not yet determined
-COLOR_IS_SUPPORTED = None
-
-# mappings for get_color() - this uses the default terminal color scheme if
-# color support is unavailable
-COLOR_ATTR_INITIALIZED = False
-COLOR_ATTR = dict([(color, 0) for color in COLOR_LIST])
-
-Ending = enum.Enum("ELLIPSE", "HYPHEN")
-SCROLL_KEYS = (curses.KEY_UP, curses.KEY_DOWN, curses.KEY_PPAGE, curses.KEY_NPAGE, curses.KEY_HOME, curses.KEY_END)
-
-
-def conf_handler(key, value):
-  if key == "features.color_override" and value != "none":
-    try:
-      set_color_override(value)
-    except ValueError as exc:
-      log.notice(exc)
-
-
-CONFIG = conf.config_dict("arm", {
-  "features.color_override": "none",
-  "features.colorInterface": True,
-  "features.acsSupport": True,
-}, conf_handler)
-
-
-def demo_glyphs():
-  """
-  Displays all ACS options with their corresponding representation. These are
-  undocumented in the pydocs. For more information see the following man page:
-  http://www.mkssoftware.com/docs/man5/terminfo.5.asp
-  """
-
-  try:
-    curses.wrapper(_show_glyphs)
-  except KeyboardInterrupt:
-    pass  # quit
-
-
-def _show_glyphs(stdscr):
-  """
-  Renders a chart with the ACS glyphs.
-  """
-
-  # allows things like semi-transparent backgrounds
-
-  try:
-    curses.use_default_colors()
-  except curses.error:
-    pass
-
-  # attempts to make the cursor invisible
-
-  try:
-    curses.curs_set(0)
-  except curses.error:
-    pass
-
-  acs_options = [item for item in curses.__dict__.items() if item[0].startswith("ACS_")]
-  acs_options.sort(key=lambda i: (i[1]))  # order by character codes
-
-  # displays a chart with all the glyphs and their representations
-
-  height, width = stdscr.getmaxyx()
-
-  if width < 30:
-    return  # not enough room to show a column
-
-  columns = width / 30
-
-  # display title
-
-  stdscr.addstr(0, 0, "Curses Glyphs:", curses.A_STANDOUT)
-
-  x, y = 0, 1
-
-  while acs_options:
-    name, keycode = acs_options.pop(0)
-    stdscr.addstr(y, x * 30, "%s (%i)" % (name, keycode))
-    stdscr.addch(y, (x * 30) + 25, keycode)
-
-    x += 1
-
-    if x >= columns:
-      x, y = 0, y + 1
-
-      if y >= height:
-        break
-
-  stdscr.getch()  # quit on keyboard input
-
-
-def get_printable(line, keep_newlines = True):
-  """
-  Provides the line back with non-printable characters stripped.
-
-  Arguments:
-    line          - string to be processed
-    keep_newlines - retains newlines if true, strips them otherwise
-  """
-
-  line = line.replace('\xc2', "'")
-  line = "".join([char for char in line if (isprint(char) or (keep_newlines and char == "\n"))])
-
-  return line
-
-
-def is_color_supported():
-  """
-  True if the display supports showing color, false otherwise.
-  """
-
-  if COLOR_IS_SUPPORTED is None:
-    _init_colors()
-
-  return COLOR_IS_SUPPORTED
-
-
-def get_color(color):
-  """
-  Provides attribute corresponding to a given text color. Supported colors
-  include:
-  red       green     yellow    blue
-  cyan      magenta   black     white
-
-  If color support isn't available or colors can't be initialized then this uses the
-  terminal's default coloring scheme.
-
-  Arguments:
-    color - name of the foreground color to be returned
-  """
-
-  color_override = get_color_override()
-
-  if color_override:
-    color = color_override
-
-  if not COLOR_ATTR_INITIALIZED:
-    _init_colors()
-
-  return COLOR_ATTR[color]
-
-
-def set_color_override(color = None):
-  """
-  Overwrites all requests for color with the given color instead. This raises
-  a ValueError if the color is invalid.
-
-  Arguments:
-    color - name of the color to overwrite requests with, None to use normal
-            coloring
-  """
-
-  if color is None:
-    CONFIG["features.color_override"] = "none"
-  elif color in COLOR_LIST.keys():
-    CONFIG["features.color_override"] = color
-  else:
-    raise ValueError("\"%s\" isn't a valid color" % color)
-
-
-def get_color_override():
-  """
-  Provides the override color used by the interface, None if it isn't set.
-  """
-
-  color_override = CONFIG.get("features.color_override", "none")
-
-  if color_override == "none":
-    return None
-  else:
-    return color_override
-
-
-def crop_str(msg, size, min_word_length = 4, min_crop = 0, end_type = Ending.ELLIPSE, get_remainder = False):
-  """
-  Provides the msg constrained to the given length, truncating on word breaks.
-  If the last word is long this truncates mid-word with an ellipse. If there
-  isn't room for even a truncated single word (or one word plus the ellipse if
-  including those) then this provides an empty string. If a cropped string ends
-  with a comma or period then it's stripped (unless we're providing the
-  remainder back). Examples:
-
-  crop_str("This is a looooong message", 17)
-  "This is a looo..."
-
-  crop_str("This is a looooong message", 12)
-  "This is a..."
-
-  crop_str("This is a looooong message", 3)
-  ""
-
-  Arguments:
-    msg             - source text
-    size            - room available for text
-    min_word_length - minimum characters before which a word is dropped, requires
-                      whole word if None
-    min_crop        - minimum characters that must be dropped if a word's cropped
-    end_type        - type of ending used when truncating:
-                      None - blank ending
-                      Ending.ELLIPSE - includes an ellipse
-                      Ending.HYPHEN - adds hyphen when breaking words
-    get_remainder   - returns a tuple instead, with the second part being the
-                      cropped portion of the message
-  """
-
-  # checks if there's room for the whole message
-
-  if len(msg) <= size:
-    if get_remainder:
-      return (msg, "")
-    else:
-      return msg
-
-  # avoids negative input
-
-  size = max(0, size)
-
-  if min_word_length is not None:
-    min_word_length = max(0, min_word_length)
-
-  min_crop = max(0, min_crop)
-
-  # since we're cropping, the effective space available is less with an
-  # ellipse, and cropping words requires an extra space for hyphens
-
-  if end_type == Ending.ELLIPSE:
-    size -= 3
-  elif end_type == Ending.HYPHEN and min_word_length is not None:
-    min_word_length += 1
-
-  # checks if there isn't the minimum space needed to include anything
-
-  last_wordbreak = msg.rfind(" ", 0, size + 1)
-
-  if last_wordbreak == -1:
-    # we're splitting the first word
-
-    if min_word_length is None or size < min_word_length:
-      if get_remainder:
-        return ("", msg)
-      else:
-        return ""
-
-    include_crop = True
-  else:
-    last_wordbreak = len(msg[:last_wordbreak].rstrip())  # drops extra ending whitespaces
-
-    if (min_word_length is not None and size < min_word_length) or (min_word_length is None and last_wordbreak < 1):
-      if get_remainder:
-        return ("", msg)
-      else:
-        return ""
-
-    if min_word_length is None:
-      min_word_length = sys.maxint
-
-    include_crop = size - last_wordbreak - 1 >= min_word_length
-
-  # if there's a minimum crop size then make sure we're cropping at least that many characters
-
-  if include_crop and min_crop:
-    next_wordbreak = msg.find(" ", size)
-
-    if next_wordbreak == -1:
-      next_wordbreak = len(msg)
-
-    include_crop = next_wordbreak - size + 1 >= min_crop
-
-  if include_crop:
-    return_msg, remainder = msg[:size], msg[size:]
-
-    if end_type == Ending.HYPHEN:
-      remainder = return_msg[-1] + remainder
-      return_msg = return_msg[:-1].rstrip() + "-"
-  else:
-    return_msg, remainder = msg[:last_wordbreak], msg[last_wordbreak:]
-
-  # if this is ending with a comma or period then strip it off
-
-  if not get_remainder and return_msg and return_msg[-1] in (",", "."):
-    return_msg = return_msg[:-1]
-
-  if end_type == Ending.ELLIPSE:
-    return_msg = return_msg.rstrip() + "..."
-
-  if get_remainder:
-    return (return_msg, remainder)
-  else:
-    return return_msg
-
-
-def pad_str(msg, size, crop_extra = False):
-  """
-  Provides the string padded with whitespace to the given length.
-
-  Arguments:
-    msg       - string to be padded
-    size      - length to be padded to
-    crop_extra - if true, crops the string when it's longer than the size
-  """
-
-  if crop_extra:
-    msg = msg[:size]
-
-  return ("%%-%is" % size) % msg
-
-
-def draw_box(panel, top, left, width, height, attr=curses.A_NORMAL):
-  """
-  Draws a box in the panel with the given bounds.
-
-  Arguments:
-    panel  - panel in which to draw
-    top    - vertical position of the box's top
-    left   - horizontal position of the box's left side
-    width  - width of the drawn box
-    height - height of the drawn box
-    attr   - text attributes
-  """
-
-  # draws the top and bottom
-
-  panel.hline(top, left + 1, width - 2, attr)
-  panel.hline(top + height - 1, left + 1, width - 2, attr)
-
-  # draws the left and right sides
-
-  panel.vline(top + 1, left, height - 2, attr)
-  panel.vline(top + 1, left + width - 1, height - 2, attr)
-
-  # draws the corners
-
-  panel.addch(top, left, curses.ACS_ULCORNER, attr)
-  panel.addch(top, left + width - 1, curses.ACS_URCORNER, attr)
-  panel.addch(top + height - 1, left, curses.ACS_LLCORNER, attr)
-
-
-def is_selection_key(key):
-  """
-  Returns true if the keycode matches the enter or space keys.
-
-  Argument:
-    key - keycode to be checked
-  """
-
-  return key in (curses.KEY_ENTER, 10, ord(' '))
-
-
-def is_scroll_key(key):
-  """
-  Returns true if the keycode is recognized by the get_scroll_position function
-  for scrolling.
-
-  Argument:
-    key - keycode to be checked
-  """
-
-  return key in SCROLL_KEYS
-
-
-def get_scroll_position(key, position, page_height, content_height, is_cursor = False):
-  """
-  Parses navigation keys, providing the new scroll position the panel should
-  use. Position is always between zero and (content_height - page_height). This
-  handles the following keys:
-  Up / Down - scrolls a position up or down
-  Page Up / Page Down - scrolls by the page_height
-  Home - top of the content
-  End - bottom of the content
-
-  This provides the input position if the key doesn't correspond to the above.
-
-  Arguments:
-    key            - keycode for the user's input
-    position       - starting position
-    page_height    - size of a single screen's worth of content
-    content_height - total lines of content that can be scrolled
-    is_cursor      - tracks a cursor position rather than scroll if true
-  """
-
-  if is_scroll_key(key):
-    shift = 0
-
-    if key == curses.KEY_UP:
-      shift = -1
-    elif key == curses.KEY_DOWN:
-      shift = 1
-    elif key == curses.KEY_PPAGE:
-      shift = -page_height + 1 if is_cursor else -page_height
-    elif key == curses.KEY_NPAGE:
-      shift = page_height - 1 if is_cursor else page_height
-    elif key == curses.KEY_HOME:
-      shift = -content_height
-    elif key == curses.KEY_END:
-      shift = content_height
-
-    # returns the shift, restricted to valid bounds
-
-    max_location = content_height - 1 if is_cursor else content_height - page_height
-    return max(0, min(position + shift, max_location))
-  else:
-    return position
-
-
-class Scroller:
-  """
-  Tracks the scrolling position when there might be a visible cursor. This
-  expects that there is a single line displayed per entry in the contents.
-  """
-
-  def __init__(self, is_cursor_enabled):
-    self.scroll_location, self.cursor_location = 0, 0
-    self.cursor_selection = None
-    self.is_cursor_enabled = is_cursor_enabled
-
-  def get_scroll_location(self, content, page_height):
-    """
-    Provides the scrolling location, taking into account the cursor's
-    location, content size, and page height.
-
-    Arguments:
-      content     - displayed content
-      page_height - height of the display area for the content
-    """
-
-    if content and page_height:
-      self.scroll_location = max(0, min(self.scroll_location, len(content) - page_height + 1))
-
-      if self.is_cursor_enabled:
-        self.get_cursor_selection(content)  # resets the cursor location
-
-        # makes sure the cursor is visible
-
-        if self.cursor_location < self.scroll_location:
-          self.scroll_location = self.cursor_location
-        elif self.cursor_location > self.scroll_location + page_height - 1:
-          self.scroll_location = self.cursor_location - page_height + 1
-
-      # checks if the bottom would run off the content (this could be the
-      # case when the content's size is dynamic and entries are removed)
-
-      if len(content) > page_height:
-        self.scroll_location = min(self.scroll_location, len(content) - page_height)
-
-    return self.scroll_location
-
-  def get_cursor_selection(self, content):
-    """
-    Provides the selected item in the content. This is the same entry until
-    the cursor moves or it's no longer available (in which case it moves on to
-    the next entry).
-
-    Arguments:
-      content - displayed content
-    """
-
-    # TODO: needs to handle duplicate entries when using this for the
-    # connection panel
-
-    if not self.is_cursor_enabled:
-      return None
-    elif not content:
-      self.cursor_location, self.cursor_selection = 0, None
-      return None
-
-    self.cursor_location = min(self.cursor_location, len(content) - 1)
-
-    if self.cursor_selection is not None and self.cursor_selection in content:
-      # moves cursor location to track the selection
-      self.cursor_location = content.index(self.cursor_selection)
-    else:
-      # select the next closest entry
-      self.cursor_selection = content[self.cursor_location]
-
-    return self.cursor_selection
-
-  def handle_key(self, key, content, page_height):
-    """
-    Moves either the scroll or cursor according to the given input.
-
-    Arguments:
-      key         - key code of user input
-      content     - displayed content
-      page_height - height of the display area for the content
-    """
-
-    if self.is_cursor_enabled:
-      self.get_cursor_selection(content)  # resets the cursor location
-      start_location = self.cursor_location
-    else:
-      start_location = self.scroll_location
-
-    new_location = get_scroll_position(key, start_location, page_height, len(content), self.is_cursor_enabled)
-
-    if start_location != new_location:
-      if self.is_cursor_enabled:
-        self.cursor_selection = content[new_location]
-      else:
-        self.scroll_location = new_location
-
-      return True
-    else:
-      return False
-
-
-def is_wide_characters_supported():
-  """
-  Checks if our version of curses has wide character support. This is required
-  to print unicode.
-
-  :returns: **bool** that's **True** if curses supports wide characters, and
-    **False** if it either can't or this can't be determined
-  """
-
-  try:
-    # Gets the dynamic library used by the interpreter for curses. This uses
-    # 'ldd' on Linux or 'otool -L' on OSX.
-    #
-    # atagar at fenrir:~/Desktop$ ldd /usr/lib/python2.6/lib-dynload/_curses.so
-    #   linux-gate.so.1 =>  (0x00a51000)
-    #   libncursesw.so.5 => /lib/libncursesw.so.5 (0x00faa000)
-    #   libpthread.so.0 => /lib/tls/i686/cmov/libpthread.so.0 (0x002f1000)
-    #   libc.so.6 => /lib/tls/i686/cmov/libc.so.6 (0x00158000)
-    #   libdl.so.2 => /lib/tls/i686/cmov/libdl.so.2 (0x00398000)
-    #   /lib/ld-linux.so.2 (0x00ca8000)
-    #
-    # atagar$ otool -L /System/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/lib-dynload/_curses.so
-    # /System/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/lib-dynload/_curses.so:
-    #   /usr/lib/libncurses.5.4.dylib (compatibility version 5.4.0, current version 5.4.0)
-    #   /usr/lib/libgcc_s.1.dylib (compatibility version 1.0.0, current version 1.0.0)
-    #   /usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 111.1.6)
-
-    import _curses
-
-    lib_dependency_lines = None
-
-    if system.is_available("ldd"):
-      lib_dependency_lines = system.call("ldd %s" % _curses.__file__)
-    elif system.is_available("otool"):
-      lib_dependency_lines = system.call("otool -L %s" % _curses.__file__)
-
-    if lib_dependency_lines:
-      for line in lib_dependency_lines:
-        if "libncursesw" in line:
-          return True
-  except:
-    pass
-
-  return False
-
-
-def _init_colors():
-  """
-  Initializes color mappings usable by curses. This can only be done after
-  calling curses.initscr().
-  """
-
-  global COLOR_ATTR_INITIALIZED, COLOR_IS_SUPPORTED
-
-  if not COLOR_ATTR_INITIALIZED:
-    # hack to replace all ACS characters with '+' if ACS support has been
-    # manually disabled
-
-    if not CONFIG["features.acsSupport"]:
-      for item in curses.__dict__:
-        if item.startswith("ACS_"):
-          curses.__dict__[item] = ord('+')
-
-      # replace a few common border pipes that are better rendered as '|' or
-      # '-' instead
-
-      curses.ACS_SBSB = ord('|')
-      curses.ACS_VLINE = ord('|')
-      curses.ACS_BSBS = ord('-')
-      curses.ACS_HLINE = ord('-')
-
-    COLOR_ATTR_INITIALIZED = True
-    COLOR_IS_SUPPORTED = False
-
-    if not CONFIG["features.colorInterface"]:
-      return
-
-    try:
-      COLOR_IS_SUPPORTED = curses.has_colors()
-    except curses.error:
-      return  # initscr hasn't been called yet
-
-    # initializes color mappings if color support is available
-    if COLOR_IS_SUPPORTED:
-      colorpair = 0
-      log.info("Terminal color support detected and enabled")
-
-      for color_name in COLOR_LIST:
-        foreground_color = COLOR_LIST[color_name]
-        background_color = -1  # allows for default (possibly transparent) background
-        colorpair += 1
-        curses.init_pair(colorpair, foreground_color, background_color)
-        COLOR_ATTR[color_name] = curses.color_pair(colorpair)
-    else:
-      log.info("Terminal color support unavailable")
diff --git a/arm/util/ui_tools.py b/arm/util/ui_tools.py
new file mode 100644
index 0000000..7a1a601
--- /dev/null
+++ b/arm/util/ui_tools.py
@@ -0,0 +1,628 @@
+"""
+Toolkit for common ui tasks when working with curses. This provides a quick and
+easy method of providing the following interface components:
+- preinitialized curses color attributes
+- unit conversion for labels
+"""
+
+import sys
+import curses
+
+from curses.ascii import isprint
+
+from stem.util import conf, enum, log, system
+
+# colors curses can handle
+COLOR_LIST = {
+  "red": curses.COLOR_RED,
+  "green": curses.COLOR_GREEN,
+  "yellow": curses.COLOR_YELLOW,
+  "blue": curses.COLOR_BLUE,
+  "cyan": curses.COLOR_CYAN,
+  "magenta": curses.COLOR_MAGENTA,
+  "black": curses.COLOR_BLACK,
+  "white": curses.COLOR_WHITE,
+}
+
+# boolean for whether color support is enabled, None if not yet determined
+COLOR_IS_SUPPORTED = None
+
+# mappings for get_color() - this uses the default terminal color scheme if
+# color support is unavailable
+COLOR_ATTR_INITIALIZED = False
+COLOR_ATTR = dict([(color, 0) for color in COLOR_LIST])
+
+Ending = enum.Enum("ELLIPSE", "HYPHEN")
+SCROLL_KEYS = (curses.KEY_UP, curses.KEY_DOWN, curses.KEY_PPAGE, curses.KEY_NPAGE, curses.KEY_HOME, curses.KEY_END)
+
+
+def conf_handler(key, value):
+  if key == "features.color_override" and value != "none":
+    try:
+      set_color_override(value)
+    except ValueError as exc:
+      log.notice(exc)
+
+
+CONFIG = conf.config_dict("arm", {
+  "features.color_override": "none",
+  "features.colorInterface": True,
+  "features.acsSupport": True,
+}, conf_handler)
+
+
+def demo_glyphs():
+  """
+  Displays all ACS options with their corresponding representation. These are
+  undocumented in the pydocs. For more information see the following man page:
+  http://www.mkssoftware.com/docs/man5/terminfo.5.asp
+  """
+
+  try:
+    curses.wrapper(_show_glyphs)
+  except KeyboardInterrupt:
+    pass  # quit
+
+
+def _show_glyphs(stdscr):
+  """
+  Renders a chart with the ACS glyphs.
+  """
+
+  # allows things like semi-transparent backgrounds
+
+  try:
+    curses.use_default_colors()
+  except curses.error:
+    pass
+
+  # attempts to make the cursor invisible
+
+  try:
+    curses.curs_set(0)
+  except curses.error:
+    pass
+
+  acs_options = [item for item in curses.__dict__.items() if item[0].startswith("ACS_")]
+  acs_options.sort(key=lambda i: (i[1]))  # order by character codes
+
+  # displays a chart with all the glyphs and their representations
+
+  height, width = stdscr.getmaxyx()
+
+  if width < 30:
+    return  # not enough room to show a column
+
+  columns = width / 30
+
+  # display title
+
+  stdscr.addstr(0, 0, "Curses Glyphs:", curses.A_STANDOUT)
+
+  x, y = 0, 1
+
+  while acs_options:
+    name, keycode = acs_options.pop(0)
+    stdscr.addstr(y, x * 30, "%s (%i)" % (name, keycode))
+    stdscr.addch(y, (x * 30) + 25, keycode)
+
+    x += 1
+
+    if x >= columns:
+      x, y = 0, y + 1
+
+      if y >= height:
+        break
+
+  stdscr.getch()  # quit on keyboard input
+
+
+def get_printable(line, keep_newlines = True):
+  """
+  Provides the line back with non-printable characters stripped.
+
+  Arguments:
+    line          - string to be processed
+    keep_newlines - retains newlines if true, strips them otherwise
+  """
+
+  line = line.replace('\xc2', "'")
+  line = "".join([char for char in line if (isprint(char) or (keep_newlines and char == "\n"))])
+
+  return line
+
+
+def is_color_supported():
+  """
+  True if the display supports showing color, false otherwise.
+  """
+
+  if COLOR_IS_SUPPORTED is None:
+    _init_colors()
+
+  return COLOR_IS_SUPPORTED
+
+
+def get_color(color):
+  """
+  Provides attribute corresponding to a given text color. Supported colors
+  include:
+  red       green     yellow    blue
+  cyan      magenta   black     white
+
+  If color support isn't available or colors can't be initialized then this uses the
+  terminal's default coloring scheme.
+
+  Arguments:
+    color - name of the foreground color to be returned
+  """
+
+  color_override = get_color_override()
+
+  if color_override:
+    color = color_override
+
+  if not COLOR_ATTR_INITIALIZED:
+    _init_colors()
+
+  return COLOR_ATTR[color]
+
+
+def set_color_override(color = None):
+  """
+  Overwrites all requests for color with the given color instead. This raises
+  a ValueError if the color is invalid.
+
+  Arguments:
+    color - name of the color to overwrite requests with, None to use normal
+            coloring
+  """
+
+  if color is None:
+    CONFIG["features.color_override"] = "none"
+  elif color in COLOR_LIST.keys():
+    CONFIG["features.color_override"] = color
+  else:
+    raise ValueError("\"%s\" isn't a valid color" % color)
+
+
+def get_color_override():
+  """
+  Provides the override color used by the interface, None if it isn't set.
+  """
+
+  color_override = CONFIG.get("features.color_override", "none")
+
+  if color_override == "none":
+    return None
+  else:
+    return color_override
+
+
+def crop_str(msg, size, min_word_length = 4, min_crop = 0, end_type = Ending.ELLIPSE, get_remainder = False):
+  """
+  Provides the msg constrained to the given length, truncating on word breaks.
+  If the last word is long this truncates mid-word with an ellipse. If there
+  isn't room for even a truncated single word (or one word plus the ellipse if
+  including those) then this provides an empty string. If a cropped string ends
+  with a comma or period then it's stripped (unless we're providing the
+  remainder back). Examples:
+
+  crop_str("This is a looooong message", 17)
+  "This is a looo..."
+
+  crop_str("This is a looooong message", 12)
+  "This is a..."
+
+  crop_str("This is a looooong message", 3)
+  ""
+
+  Arguments:
+    msg             - source text
+    size            - room available for text
+    min_word_length - minimum characters before which a word is dropped, requires
+                      whole word if None
+    min_crop        - minimum characters that must be dropped if a word's cropped
+    end_type        - type of ending used when truncating:
+                      None - blank ending
+                      Ending.ELLIPSE - includes an ellipse
+                      Ending.HYPHEN - adds hyphen when breaking words
+    get_remainder   - returns a tuple instead, with the second part being the
+                      cropped portion of the message
+  """
+
+  # checks if there's room for the whole message
+
+  if len(msg) <= size:
+    if get_remainder:
+      return (msg, "")
+    else:
+      return msg
+
+  # avoids negative input
+
+  size = max(0, size)
+
+  if min_word_length is not None:
+    min_word_length = max(0, min_word_length)
+
+  min_crop = max(0, min_crop)
+
+  # since we're cropping, the effective space available is less with an
+  # ellipse, and cropping words requires an extra space for hyphens
+
+  if end_type == Ending.ELLIPSE:
+    size -= 3
+  elif end_type == Ending.HYPHEN and min_word_length is not None:
+    min_word_length += 1
+
+  # checks if there isn't the minimum space needed to include anything
+
+  last_wordbreak = msg.rfind(" ", 0, size + 1)
+
+  if last_wordbreak == -1:
+    # we're splitting the first word
+
+    if min_word_length is None or size < min_word_length:
+      if get_remainder:
+        return ("", msg)
+      else:
+        return ""
+
+    include_crop = True
+  else:
+    last_wordbreak = len(msg[:last_wordbreak].rstrip())  # drops extra ending whitespaces
+
+    if (min_word_length is not None and size < min_word_length) or (min_word_length is None and last_wordbreak < 1):
+      if get_remainder:
+        return ("", msg)
+      else:
+        return ""
+
+    if min_word_length is None:
+      min_word_length = sys.maxint
+
+    include_crop = size - last_wordbreak - 1 >= min_word_length
+
+  # if there's a minimum crop size then make sure we're cropping at least that many characters
+
+  if include_crop and min_crop:
+    next_wordbreak = msg.find(" ", size)
+
+    if next_wordbreak == -1:
+      next_wordbreak = len(msg)
+
+    include_crop = next_wordbreak - size + 1 >= min_crop
+
+  if include_crop:
+    return_msg, remainder = msg[:size], msg[size:]
+
+    if end_type == Ending.HYPHEN:
+      remainder = return_msg[-1] + remainder
+      return_msg = return_msg[:-1].rstrip() + "-"
+  else:
+    return_msg, remainder = msg[:last_wordbreak], msg[last_wordbreak:]
+
+  # if this is ending with a comma or period then strip it off
+
+  if not get_remainder and return_msg and return_msg[-1] in (",", "."):
+    return_msg = return_msg[:-1]
+
+  if end_type == Ending.ELLIPSE:
+    return_msg = return_msg.rstrip() + "..."
+
+  if get_remainder:
+    return (return_msg, remainder)
+  else:
+    return return_msg
+
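Besides the truncation examples in the docstring, get_remainder = True also hands back the portion that was dropped. For instance, assuming arm is importable:

  from arm.util import ui_tools

  shown, remainder = ui_tools.crop_str("This is a looooong message", 17, get_remainder = True)
  # shown == 'This is a looo...', remainder == 'oong message'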
+
+def pad_str(msg, size, crop_extra = False):
+  """
+  Provides the string padded with whitespace to the given length.
+
+  Arguments:
+    msg       - string to be padded
+    size      - length to be padded to
+    crop_extra - if true, crops the string when it's longer than the size
+  """
+
+  if crop_extra:
+    msg = msg[:size]
+
+  return ("%%-%is" % size) % msg
+
+
+def draw_box(panel, top, left, width, height, attr=curses.A_NORMAL):
+  """
+  Draws a box in the panel with the given bounds.
+
+  Arguments:
+    panel  - panel in which to draw
+    top    - vertical position of the box's top
+    left   - horizontal position of the box's left side
+    width  - width of the drawn box
+    height - height of the drawn box
+    attr   - text attributes
+  """
+
+  # draws the top and bottom
+
+  panel.hline(top, left + 1, width - 2, attr)
+  panel.hline(top + height - 1, left + 1, width - 2, attr)
+
+  # draws the left and right sides
+
+  panel.vline(top + 1, left, height - 2, attr)
+  panel.vline(top + 1, left + width - 1, height - 2, attr)
+
+  # draws the corners
+
+  panel.addch(top, left, curses.ACS_ULCORNER, attr)
+  panel.addch(top, left + width - 1, curses.ACS_URCORNER, attr)
+  panel.addch(top + height - 1, left, curses.ACS_LLCORNER, attr)
+
+
+def is_selection_key(key):
+  """
+  Returns true if the keycode matches the enter or space keys.
+
+  Argument:
+    key - keycode to be checked
+  """
+
+  return key in (curses.KEY_ENTER, 10, ord(' '))
+
+
+def is_scroll_key(key):
+  """
+  Returns true if the keycode is recognized by the get_scroll_position function
+  for scrolling.
+
+  Argument:
+    key - keycode to be checked
+  """
+
+  return key in SCROLL_KEYS
+
+
+def get_scroll_position(key, position, page_height, content_height, is_cursor = False):
+  """
+  Parses navigation keys, providing the new scroll position the panel should
+  use. Position is always between zero and (content_height - page_height). This
+  handles the following keys:
+  Up / Down - scrolls a position up or down
+  Page Up / Page Down - scrolls by the page_height
+  Home - top of the content
+  End - bottom of the content
+
+  This provides the input position if the key doesn't correspond to the above.
+
+  Arguments:
+    key            - keycode for the user's input
+    position       - starting position
+    page_height    - size of a single screen's worth of content
+    content_height - total lines of content that can be scrolled
+    is_cursor      - tracks a cursor position rather than scroll if true
+  """
+
+  if is_scroll_key(key):
+    shift = 0
+
+    if key == curses.KEY_UP:
+      shift = -1
+    elif key == curses.KEY_DOWN:
+      shift = 1
+    elif key == curses.KEY_PPAGE:
+      shift = -page_height + 1 if is_cursor else -page_height
+    elif key == curses.KEY_NPAGE:
+      shift = page_height - 1 if is_cursor else page_height
+    elif key == curses.KEY_HOME:
+      shift = -content_height
+    elif key == curses.KEY_END:
+      shift = content_height
+
+    # returns the shift, restricted to valid bounds
+
+    max_location = content_height - 1 if is_cursor else content_height - page_height
+    return max(0, min(position + shift, max_location))
+  else:
+    return position
+
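For example, a page down keypress against a hundred lines of content shown ten at a time:

  import curses

  from arm.util import ui_tools

  position = ui_tools.get_scroll_position(curses.KEY_NPAGE, 0, 10, 100)
  # position == 10, and further paging is capped at content_height - page_height (90)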
+
+class Scroller:
+  """
+  Tracks the scrolling position when there might be a visible cursor. This
+  expects that there is a single line displayed per entry in the contents.
+  """
+
+  def __init__(self, is_cursor_enabled):
+    self.scroll_location, self.cursor_location = 0, 0
+    self.cursor_selection = None
+    self.is_cursor_enabled = is_cursor_enabled
+
+  def get_scroll_location(self, content, page_height):
+    """
+    Provides the scrolling location, taking into account the cursor's
+    location, content size, and page height.
+
+    Arguments:
+      content     - displayed content
+      page_height - height of the display area for the content
+    """
+
+    if content and page_height:
+      self.scroll_location = max(0, min(self.scroll_location, len(content) - page_height + 1))
+
+      if self.is_cursor_enabled:
+        self.get_cursor_selection(content)  # resets the cursor location
+
+        # makes sure the cursor is visible
+
+        if self.cursor_location < self.scroll_location:
+          self.scroll_location = self.cursor_location
+        elif self.cursor_location > self.scroll_location + page_height - 1:
+          self.scroll_location = self.cursor_location - page_height + 1
+
+      # checks if the bottom would run off the content (this could be the
+      # case when the content's size is dynamic and entries are removed)
+
+      if len(content) > page_height:
+        self.scroll_location = min(self.scroll_location, len(content) - page_height)
+
+    return self.scroll_location
+
+  def get_cursor_selection(self, content):
+    """
+    Provides the selected item in the content. This is the same entry until
+    the cursor moves or it's no longer available (in which case it moves on to
+    the next entry).
+
+    Arguments:
+      content - displayed content
+    """
+
+    # TODO: needs to handle duplicate entries when using this for the
+    # connection panel
+
+    if not self.is_cursor_enabled:
+      return None
+    elif not content:
+      self.cursor_location, self.cursor_selection = 0, None
+      return None
+
+    self.cursor_location = min(self.cursor_location, len(content) - 1)
+
+    if self.cursor_selection is not None and self.cursor_selection in content:
+      # moves cursor location to track the selection
+      self.cursor_location = content.index(self.cursor_selection)
+    else:
+      # select the next closest entry
+      self.cursor_selection = content[self.cursor_location]
+
+    return self.cursor_selection
+
+  def handle_key(self, key, content, page_height):
+    """
+    Moves either the scroll or cursor according to the given input.
+
+    Arguments:
+      key         - key code of user input
+      content     - displayed content
+      page_height - height of the display area for the content
+    """
+
+    if self.is_cursor_enabled:
+      self.get_cursor_selection(content)  # resets the cursor location
+      start_location = self.cursor_location
+    else:
+      start_location = self.scroll_location
+
+    new_location = get_scroll_position(key, start_location, page_height, len(content), self.is_cursor_enabled)
+
+    if start_location != new_location:
+      if self.is_cursor_enabled:
+        self.cursor_selection = content[new_location]
+      else:
+        self.scroll_location = new_location
+
+      return True
+    else:
+      return False
+
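A sketch of how a panel might drive the Scroller with the cursor enabled (the content and keypress here are stand-ins):

  import curses

  from arm.util import ui_tools

  content = ["line %i" % i for i in range(50)]
  scroller = ui_tools.Scroller(True)  # cursor enabled

  # pretend the user pressed 'down' in a ten line tall panel
  is_redraw_needed = scroller.handle_key(curses.KEY_DOWN, content, 10)

  top_line = scroller.get_scroll_location(content, 10)  # 0, still on the first page
  selection = scroller.get_cursor_selection(content)    # 'line 1' after the keypress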
+
+def is_wide_characters_supported():
+  """
+  Checks if our version of curses has wide character support. This is required
+  to print unicode.
+
+  :returns: **bool** that's **True** if curses supports wide characters, and
+    **False** if it either can't or this can't be determined
+  """
+
+  try:
+    # Gets the dynamic library used by the interpreter for curses. This uses
+    # 'ldd' on Linux or 'otool -L' on OSX.
+    #
+    # atagar at fenrir:~/Desktop$ ldd /usr/lib/python2.6/lib-dynload/_curses.so
+    #   linux-gate.so.1 =>  (0x00a51000)
+    #   libncursesw.so.5 => /lib/libncursesw.so.5 (0x00faa000)
+    #   libpthread.so.0 => /lib/tls/i686/cmov/libpthread.so.0 (0x002f1000)
+    #   libc.so.6 => /lib/tls/i686/cmov/libc.so.6 (0x00158000)
+    #   libdl.so.2 => /lib/tls/i686/cmov/libdl.so.2 (0x00398000)
+    #   /lib/ld-linux.so.2 (0x00ca8000)
+    #
+    # atagar$ otool -L /System/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/lib-dynload/_curses.so
+    # /System/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/lib-dynload/_curses.so:
+    #   /usr/lib/libncurses.5.4.dylib (compatibility version 5.4.0, current version 5.4.0)
+    #   /usr/lib/libgcc_s.1.dylib (compatibility version 1.0.0, current version 1.0.0)
+    #   /usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 111.1.6)
+
+    import _curses
+
+    lib_dependency_lines = None
+
+    if system.is_available("ldd"):
+      lib_dependency_lines = system.call("ldd %s" % _curses.__file__)
+    elif system.is_available("otool"):
+      lib_dependency_lines = system.call("otool -L %s" % _curses.__file__)
+
+    if lib_dependency_lines:
+      for line in lib_dependency_lines:
+        if "libncursesw" in line:
+          return True
+  except:
+    pass
+
+  return False
+
+
+def _init_colors():
+  """
+  Initializes color mappings usable by curses. This can only be done after
+  calling curses.initscr().
+  """
+
+  global COLOR_ATTR_INITIALIZED, COLOR_IS_SUPPORTED
+
+  if not COLOR_ATTR_INITIALIZED:
+    # hack to replace all ACS characters with '+' if ACS support has been
+    # manually disabled
+
+    if not CONFIG["features.acsSupport"]:
+      for item in curses.__dict__:
+        if item.startswith("ACS_"):
+          curses.__dict__[item] = ord('+')
+
+      # replace a few common border pipes that are better rendered as '|' or
+      # '-' instead
+
+      curses.ACS_SBSB = ord('|')
+      curses.ACS_VLINE = ord('|')
+      curses.ACS_BSBS = ord('-')
+      curses.ACS_HLINE = ord('-')
+
+    COLOR_ATTR_INITIALIZED = True
+    COLOR_IS_SUPPORTED = False
+
+    if not CONFIG["features.colorInterface"]:
+      return
+
+    try:
+      COLOR_IS_SUPPORTED = curses.has_colors()
+    except curses.error:
+      return  # initscr hasn't been called yet
+
+    # initializes color mappings if color support is available
+    if COLOR_IS_SUPPORTED:
+      colorpair = 0
+      log.info("Terminal color support detected and enabled")
+
+      for color_name in COLOR_LIST:
+        foreground_color = COLOR_LIST[color_name]
+        background_color = -1  # allows for default (possibly transparent) background
+        colorpair += 1
+        curses.init_pair(colorpair, foreground_color, background_color)
+        COLOR_ATTR[color_name] = curses.color_pair(colorpair)
+    else:
+      log.info("Terminal color support unavailable")






More information about the tor-commits mailing list