diff --git a/vendor/distribute-0.6.34/CHANGES.txt b/vendor/distribute-0.6.34/CHANGES.txt
deleted file mode 100644
index 1b24810806f207e8dc900b92edc3c02bb0d98a7b..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/CHANGES.txt
+++ /dev/null
@@ -1,487 +0,0 @@
-=======
-CHANGES
-=======
-
-----------
-Unreleased
-----------
-
-+ Issue #341: 0.6.33 fails to build under python 2.4
-
-------
-0.6.33
-------
-
-* Fix 2 errors with Jython 2.5.
-* Fix 1 failure with Jython 2.5 and 2.7.
-* Disable workaround for Jython scripts on Linux systems.
-* Issue #336: `setup.py` no longer masks failure exit code when tests fail.
-* Fix issue in pkg_resources where try/except around a platform-dependent
-  import would trigger hook load failures on Mercurial. See pull request 32
-  for details.
-* Issue #341: Fix a ResourceWarning.
-
-------
-0.6.32
-------
-
-* Fix test suite with Python 2.6.
-* Fix some DeprecationWarnings and ResourceWarnings.
-* Issue #335: Backed out `setup_requires` superseding installed requirements
-  until regression can be addressed.
-
-------
-0.6.31
-------
-
-* Issue #303: Make sure the manifest only ever contains UTF-8 in Python 3.
-* Issue #329: Properly close files created by tests for compatibility with
-  Jython.
-* Work around Jython bugs `#1980 <http://bugs.jython.org/issue1980>`_ and
-  `#1981 <http://bugs.jython.org/issue1981>`_.
-* Issue #334: Provide workaround for packages that reference `sys.__stdout__`
-  such as numpy does. This change should address
-  `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
-  as the system encoding is UTF-8 or the IO encoding is specified in the
-  environment, i.e.::
-
-     PYTHONIOENCODING=utf8 pip install numpy
-
-* Fix for encoding issue when installing from Windows executable on Python 3.
-* Issue #323: Allow `setup_requires` requirements to supersede installed
-  requirements. Added some new keyword arguments to existing pkg_resources
-  methods. Also had to update how __path__ is handled for namespace packages
-  to ensure that when a new egg distribution containing a namespace package is
-  placed on sys.path, the entries in __path__ are found in the same order they
-  would have been in had that egg been on the path when pkg_resources was
-  first imported.
-
-------
-0.6.30
-------
-
-* Issue #328: Clean up temporary directories in distribute_setup.py.
-* Fix fatal bug in distribute_setup.py.
-
-------
-0.6.29
-------
-
-* Pull Request #14: Honor file permissions in zip files.
-* Issue #327: Merged pull request #24 to fix a dependency problem with pip.
-* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
-* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
-  to produce uploadable documentation.
-* Issue #326: `upload_docs` provided mangled auth credentials under Python 3.
-* Issue #320: Fix check for "createable" in distribute_setup.py.
-* Issue #305: Remove a warning that was triggered during normal operations.
-* Issue #311: Print metadata in UTF-8 independent of platform.
-* Issue #303: Read manifest file with UTF-8 encoding under Python 3.
-* Issue #301: Allow running tests of namespace packages when using 2to3.
-* Issue #304: Prevent import loop in site.py under Python 3.3.
-* Issue #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
-* Issue #299: The develop command didn't work on Python 3, when using 2to3,
-  as the egg link would go to the Python 2 source. Linking to the 2to3'd code
-  in build/lib makes it work, although you will have to rebuild the module
-  before testing it.
-* Issue #306: Even if 2to3 is used, we build in-place under Python 2.
-* Issue #307: Prints the full path when .svn/entries is broken.
-* Issue #313: Support for sdist subcommands (Python 2.7)
-* Issue #314: test_local_index() would fail on OS X.
-* Issue #310: Non-ascii characters in a namespace __init__.py cause errors.
-* Issue #218: Improved documentation on behavior of `package_data` and
-  `include_package_data`. Files indicated by `package_data` are now included
-  in the manifest.
-* `distribute_setup.py` now allows a `--download-base` argument for retrieving
-  distribute from a specified location.
-
-------
-0.6.28
-------
-
-* Issue #294: setup.py can now be invoked from any directory.
-* Scripts are now installed honoring the umask.
-* Added support for .dist-info directories.
-* Issue #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
-  Python 3.3.
-
-------
-0.6.27
-------
-
-* Support current snapshots of CPython 3.3.
-* Distribute now recognizes README.rst as a standard, default readme file.
-* Exclude 'encodings' modules when removing modules from sys.modules.
-  Workaround for #285.
-* Issue #231: Don't fiddle with system python when used with buildout
-  (bootstrap.py)
-
-------
-0.6.26
-------
-
-* Issue #183: Symlinked files are now extracted from source distributions.
-* Issue #227: Easy_install fetch parameters are now passed during the
-  installation of a source distribution; now fulfillment of setup_requires
-  dependencies will honor the parameters passed to easy_install.
-
-------
-0.6.25
-------
-
-* Issue #258: Workaround a cache issue
-* Issue #260: distribute_setup.py now accepts the --user parameter for
-  Python 2.6 and later.
-* Issue #262: package_index.open_with_auth no longer throws LookupError
-  on Python 3.
-* Issue #269: AttributeError when an exception occurs reading Manifest.in
-  on late releases of Python.
-* Issue #272: Prevent TypeError when namespace package names are unicode
-  and single-install-externally-managed is used. Also fixes PIP issue
-  449.
-* Issue #273: Legacy script launchers now install with Python2/3 support.
-
-------
-0.6.24
-------
-
-* Issue #249: Added options to exclude 2to3 fixers
-
-------
-0.6.23
-------
-
-* Issue #244: Fixed a test
-* Issue #243: Fixed a test
-* Issue #239: Fixed a test
-* Issue #240: Fixed a test
-* Issue #241: Fixed a test
-* Issue #237: Fixed a test
-* Issue #238: easy_install now uses 64bit executable wrappers on 64bit Python
-* Issue #208: Fixed parsed_versions; it now honors post-releases as noted in the documentation
-* Issue #207: Windows cli and gui wrappers pass CTRL-C to child python process
-* Issue #227: easy_install now passes its arguments to setup.py bdist_egg
-* Issue #225: Fixed a NameError on Python 2.5, 2.4
-
-------
-0.6.21
-------
-
-* Issue #225: Fixed a regression on py2.4
-
-------
-0.6.20
-------
-
-* Issue #135: Include url in warning when processing URLs in package_index.
-* Issue #212: Fix issue where easy_install fails on Python 3 with the Windows installer.
-* Issue #213: Fix typo in documentation.
-
-------
-0.6.19
-------
-
-* Issue 206: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
-
-------
-0.6.18
-------
-
-* Issue 210: Fixed a regression introduced by Issue 204 fix.
-
-------
-0.6.17
-------
-
-* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
-  variable to allow disabling installation of the easy_install-${version} script.
-* Support Python >=3.1.4 and >=3.2.1.
-* Issue 204: Don't try to import the parent of a namespace package in
-  declare_namespace
-* Issue 196: Tolerate responses with multiple Content-Length headers
-* Issue 205: Sandboxing doesn't preserve working_set. Leads to setup_requires
-  problems.
-
-------
-0.6.16
-------
-
-* Builds sdist gztar even on Windows (avoiding Issue 193).
-* Issue 192: Fixed metadata omitted on Windows when package_dir
-  specified with forward-slash.
-* Issue 195: Cython build support.
-* Issue 200: Issues with recognizing 64-bit packages on Windows.
-
-------
-0.6.15
-------
-
-* Fixed typo in bdist_egg
-* Several issues under Python 3 have been solved.
-* Issue 146: Fixed missing DLL files after easy_install of windows exe package.
-
-------
-0.6.14
-------
-
-* Issue 170: Fixed unittest failure. Thanks to Toshio.
-* Issue 171: Fixed race condition in unittests causing deadlocks in the test suite.
-* Issue 143: Fixed a lookup issue with easy_install.
-  Thanks to David and Zooko.
-* Issue 174: Fixed the edit mode when it's used with setuptools itself
-
-------
-0.6.13
-------
-
-* Issue 160: 2.7 gives ValueError("Invalid IPv6 URL")
-* Issue 150: Fixed using ~/.local even in a --no-site-packages virtualenv
-* Issue 163: scan index links before external links, and don't use the md5 when
-  comparing two distributions
-
-------
-0.6.12
-------
-
-* Issue 149: Fixed various failures on 2.3/2.4
-
-------
-0.6.11
-------
-
-* Found another case of SandboxViolation - fixed
-* Issue 15 and 48: Introduced a socket timeout of 15 seconds on url openings
-* Added indexsidebar.html into MANIFEST.in
-* Issue 108: Fixed TypeError with Python3.1
-* Issue 121: Fixed --help install command trying to actually install.
-* Issue 112: Added an os.makedirs so that Tarek's solution will work.
-* Issue 133: Added --no-find-links to easy_install
-* Added easy_install --user
-* Issue 100: Fixed develop --user not taking '.' in PYTHONPATH into account
-* Issue 134: removed spurious UserWarnings. Patch by VanLindberg
-* Issue 138: cant_write_to_target error when setup_requires is used.
-* Issue 147: respect the sys.dont_write_bytecode flag
-
-------
-0.6.10
-------
-
-* Reverted change made for the DistributionNotFound exception because
-  zc.buildout uses the exception message to get the name of the
-  distribution.
-
------
-0.6.9
------
-
-* Issue 90: unknown setuptools version can be added in the working set
-* Issue 87: setup.py doesn't try to convert distribute_setup.py anymore.
-  Initial patch by arfrever.
-* Issue 89: added a side bar with a download link to the doc.
-* Issue 86: fixed missing sentence in pkg_resources doc.
-* Added a nicer error message when a DistributionNotFound is raised.
-* Issue 80: test_develop now works with Python 3.1
-* Issue 93: upload_docs now works if there is an empty sub-directory.
-* Issue 70: exec bit on non-exec files
-* Issue 99: now the standalone easy_install command doesn't use a
-  "setup.cfg" if any exists in the working directory. It will use it
-  only if triggered by ``install_requires`` from a setup.py call
-  (install, develop, etc).
-* Issue 101: Allowing ``os.devnull`` in Sandbox
-* Issue 92: Fixed the "no eggs" found error with MacPort
-  (platform.mac_ver() fails)
-* Issue 103: test_get_script_header_jython_workaround not run
-  anymore under py3 with C or POSIX locale. Contributed by Arfrever.
-* Issue 104: removed the assertion when the installation fails,
-  with a nicer message for the end user.
-* Issue 100: making sure there's no SandboxViolation when
-  the setup script patches setuptools.
-
------
-0.6.8
------
-
-* Added "check_packages" in dist. (added in Setuptools 0.6c11)
-* Fixed the DONT_PATCH_SETUPTOOLS state.
-
------
-0.6.7
------
-
-* Issue 58: Added --user support to the develop command
-* Issue 11: Generated scripts now wrap their call to the script entry point
-  in the standard "if __name__ == '__main__'"
-* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
-  can drive an installation that doesn't patch a global setuptools.
-* Reviewed unladen-swallow specific change from
-  http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
-  and determined that it no longer applies. Distribute should work fine with
-  Unladen Swallow 2009Q3.
-* Issue 21: Allow PackageIndex.open_url to gracefully handle all cases of a
-  httplib.HTTPException instead of just InvalidURL and BadStatusLine.
-* Removed virtual-python.py from this distribution and updated documentation
-  to point to the actively maintained virtualenv instead.
-* Issue 64: use_setuptools no longer rebuilds the distribute egg every
-  time it is run
-* use_setuptools now properly respects the requested version
-* use_setuptools will no longer try to import a distribute egg for the
-  wrong Python version
-* Issue 74: no_fake should be True by default.
-* Issue 72: avoid a bootstrapping issue with easy_install -U
-
------
-0.6.6
------
-
-* Unified the bootstrap file so it works on both py2.x and py3k without 2to3
-  (patch by Holger Krekel)
-
------
-0.6.5
------
-
-* Issue 65: cli.exe and gui.exe are now generated at build time,
-  depending on the platform in use.
-
-* Issue 67: Fixed doc typo (PEP 381/382)
-
-* Distribute no longer shadows setuptools if we require a 0.7-series
-  setuptools.  And an error is raised when installing a 0.7 setuptools with
-  distribute.
-
-* When run from within buildout, no attempt is made to modify an existing
-  setuptools egg, whether in a shared egg directory or a system setuptools.
-
-* Fixed a hole in sandboxing allowing builtin file to write outside of
-  the sandbox.
-
------
-0.6.4
------
-
-* Added the generation of `distribute_setup_3k.py` during the release.
-  This closes issue #52.
-
-* Added an upload_docs command to easily upload project documentation to
-  PyPI's http://packages.python.org. This closes issue #56.
-
-* Fixed a bootstrap bug on the use_setuptools() API.
-
------
-0.6.3
------
-
-setuptools
-==========
-
-* Fixed a bunch of calls to file() that caused crashes on Python 3.
-
-bootstrapping
-=============
-
-* Fixed a bug in sorting that caused bootstrap to fail on Python 3.
-
------
-0.6.2
------
-
-setuptools
-==========
-
-* Added Python 3 support; see docs/python3.txt.
-  This closes http://bugs.python.org/setuptools/issue39.
-
-* Added option to run 2to3 automatically when installing on Python 3.
-  This closes issue #31.
-
-* Fixed invalid usage of requirement.parse, which broke develop -d.
-  This closes http://bugs.python.org/setuptools/issue44.
-
-* Fixed script launcher for 64-bit Windows.
-  This closes http://bugs.python.org/setuptools/issue2.
-
-* Fixed KeyError when compiling extensions.
-  This closes http://bugs.python.org/setuptools/issue41.
-
-bootstrapping
-=============
-
-* Fixed bootstrap not working on Windows. This closes issue #49.
-
-* Fixed 2.6 dependencies. This closes issue #50.
-
-* Make sure setuptools is patched when running through easy_install.
-  This closes http://bugs.python.org/setuptools/issue40.
-
------
-0.6.1
------
-
-setuptools
-==========
-
-* package_index.urlopen now catches BadStatusLine and malformed url errors.
-  This closes issue #16 and issue #18.
-
-* zip_ok is now False by default. This closes
-  http://bugs.python.org/setuptools/issue33.
-
-* Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
-
-* Fixed invalid bootstrapping with easy_install installation (issue #40).
-  Thanks to Florian Schulze for the help.
-
-* Removed buildout/bootstrap.py. A new repository will create a specific
-  bootstrap.py script.
-
-
-bootstrapping
-=============
-
-* The bootstrap process leaves setuptools alone if it is detected on the system
-  and --root or --prefix is provided, but is not in the same location.
-  This closes issue #10.
-
----
-0.6
----
-
-setuptools
-==========
-
-* Packages required at build time were not fully present at install time.
-  This closes issue #12.
-
-* Protected against failures in tarfile extraction. This closes issue #10.
-
-* Made Jython api_tests.txt doctest compatible. This closes issue #7.
-
-* sandbox.py replaced builtin type file with builtin function open. This
-  closes issue #6.
-
-* Immediately close all file handles. This closes issue #3.
-
-* Added compatibility with Subversion 1.6. This references issue #1.
-
-pkg_resources
-=============
-
-* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
-  instead. Based on a patch from ronaldoussoren. This closes issue #5.
-
-* Fixed a SandboxViolation for mkdir that could occur in certain cases.
-  This closes issue #13.
-
-* Allow find_on_path on systems with tight permissions to fail gracefully.
-  This closes issue #9.
-
-* Corrected inconsistency between documentation and code of add_entry.
-  This closes issue #8.
-
-* Immediately close all file handles. This closes issue #3.
-
-easy_install
-============
-
-* Immediately close all file handles. This closes issue #3.
-
diff --git a/vendor/distribute-0.6.34/CONTRIBUTORS.txt b/vendor/distribute-0.6.34/CONTRIBUTORS.txt
deleted file mode 100644
index 22c90aba19c744d8b34dd2cfcb1c1eb8101c2573..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/CONTRIBUTORS.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-============
-Contributors
-============
-
-* Alex Grönholm
-* Alice Bevan-McGregor
-* Arfrever Frehtes Taifersar Arahesis
-* Christophe Combelles
-* Daniel Stutzbach
-* Daniel Holth
-* Hanno Schlichting
-* Jannis Leidel
-* Jason R. Coombs
-* Jim Fulton
-* Jonathan Lange
-* Justin Azoff
-* Lennart Regebro
-* Marc Abramowitz
-* Martin von Löwis
-* Noufal Ibrahim
-* Pete Hollobon
-* Philip Jenvey
-* Reinout van Rees
-* Robert Myers
-* Stefan H. Holek
-* Tarek Ziadé
-* Toshio Kuratomi
-
-If you think your name is missing, please add it (alpha order by first name).
-
diff --git a/vendor/distribute-0.6.34/DEVGUIDE.txt b/vendor/distribute-0.6.34/DEVGUIDE.txt
deleted file mode 100644
index 8dcabfd1d7f60066772391a873d687392f476123..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/DEVGUIDE.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-============================
-Quick notes for contributors
-============================
-
-Distribute uses Mercurial.
-
-Grab the code at bitbucket::
-
-    $ hg clone https://bitbucket.org/tarek/distribute
-
-If you want to contribute changes, we recommend you fork the repository on
-bitbucket, commit the changes to your repository, and then make a pull request
-on bitbucket. If you make some changes, don't forget to:
-
-- add a note in CHANGES.txt
-
-And remember that 0.6 (the only development line) is for bug fixes only, and the
-APIs should remain fully backward compatible with Setuptools.
-
-You can run the tests via::
-
-    $ python setup.py test
diff --git a/vendor/distribute-0.6.34/MANIFEST.in b/vendor/distribute-0.6.34/MANIFEST.in
deleted file mode 100644
index 9837747a223808119196983d12531a01068991a1..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/MANIFEST.in
+++ /dev/null
@@ -1,9 +0,0 @@
-recursive-include setuptools *.py *.txt *.exe
-recursive-include tests *.py *.c *.pyx *.txt
-recursive-include setuptools/tests *.html
-recursive-include docs *.py *.txt *.conf *.css *.css_t Makefile indexsidebar.html
-recursive-include _markerlib *.py
-include *.py
-include *.txt
-include MANIFEST.in
-include launcher.c
diff --git a/vendor/distribute-0.6.34/PKG-INFO b/vendor/distribute-0.6.34/PKG-INFO
deleted file mode 100644
index d1c8adfed70b975573182f460a62fd11f6d58613..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/PKG-INFO
+++ /dev/null
@@ -1,868 +0,0 @@
-Metadata-Version: 1.1
-Name: distribute
-Version: 0.6.34
-Summary: Easily download, build, install, upgrade, and uninstall Python packages
-Home-page: http://packages.python.org/distribute
-Author: The fellowship of the packaging
-Author-email: distutils-sig@python.org
-License: PSF or ZPL
-Description: ===============================
-        Installing and Using Distribute
-        ===============================
-        
-        .. contents:: **Table of Contents**
-        
-        -----------
-        Disclaimers
-        -----------
-        
-        About the fork
-        ==============
-        
-        `Distribute` is a fork of the `Setuptools` project.
-        
-        Distribute is intended to replace Setuptools as the standard method
-        for working with Python module distributions.
-        
-        The fork has two goals:
-        
-        - Providing a backward compatible version to replace Setuptools
-          and make all distributions that depend on Setuptools work as
-          before, but with fewer bugs and behavioral issues.
-        
-          This work is done in the 0.6.x series.
-        
-          Starting with version 0.6.2, Distribute supports Python 3.
-          Installing and using distribute for Python 3 code works exactly
-          the same as for Python 2 code, but Distribute also helps you to support
-          Python 2 and Python 3 from the same source code by letting you run 2to3
-          on the code as a part of the build process, by setting the keyword parameter
-          ``use_2to3`` to True. See http://packages.python.org/distribute for more
-          information.
-        
-        - Refactoring the code, and releasing it in several distributions.
-          This work is being done in the 0.7.x series but not yet released.
-        
-        The roadmap is still evolving, and the page that is up-to-date is
-        located at `http://packages.python.org/distribute/roadmap`.
-        
-        If you install `Distribute` and want to switch back for any reason to
-        `Setuptools`, get to the `Uninstallation instructions`_ section.
-        
-        More documentation
-        ==================
-        
-        You can get more information in the Sphinx-based documentation, located
-        at http://packages.python.org/distribute. This documentation includes the old
-        Setuptools documentation, which is slowly being replaced, as well as brand new content.
-        
-        About the installation process
-        ==============================
-        
-        The `Distribute` installer modifies your installation by de-activating an
-        existing installation of `Setuptools` in a bootstrap process. This process
-        has been tested in various installation schemes and contexts but in case of a
-        bug during this process your Python installation might be left in a broken
-        state. Since all modified files and directories are copied before the
-        installation starts, you will be able to get back to a normal state by reading
-        the instructions in the `Uninstallation instructions`_ section.
-        
-        In any case, it is recommended to save your `site-packages` directory before
-        you start the installation of `Distribute`.
-        
-        -------------------------
-        Installation Instructions
-        -------------------------
-        
-        Distribute is only released as a source distribution.
-        
-        It can be installed using pip, either from the source tarball
-        or by using the ``distribute_setup.py`` script provided online.
-        
-        ``distribute_setup.py`` is the simplest and preferred way on all systems.
-        
-        distribute_setup.py
-        ===================
-        
-        Download
-        `distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
-        and execute it, using the Python interpreter of your choice.
-        
-        If your shell has the ``curl`` program you can do::
-        
-            $ curl -O http://python-distribute.org/distribute_setup.py
-            $ python distribute_setup.py
-        
-        Notice this file is also provided in the source release.
-        
-        pip
-        ===
-        
-        Run easy_install or pip::
-        
-            $ pip install distribute
-        
-        Source installation
-        ===================
-        
-        Download the source tarball, uncompress it, then run the install command::
-        
-            $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.34.tar.gz
-            $ tar -xzvf distribute-0.6.34.tar.gz
-            $ cd distribute-0.6.34
-            $ python setup.py install
-        
-        ---------------------------
-        Uninstallation Instructions
-        ---------------------------
-        
-        Like other distutils-based distributions, Distribute doesn't provide an
-        uninstaller yet. It's all done manually! We are all waiting for PEP 376
-        support in Python.
-        
-        Distribute is installed in three steps:
-        
-        1. it moves an existing installation of Setuptools out of the way
-        2. it installs a `fake` setuptools installation
-        3. it installs distribute
-        
-        Distribute can be removed like this:
-        
-        - remove the ``distribute*.egg`` file located in your site-packages directory
-        - remove the ``setuptools.pth`` file located in your site-packages directory
-        - remove the easy_install script located in your ``sys.prefix/bin`` directory
-        - remove the ``setuptools*.egg`` directory located in your site-packages directory,
-          if any.
-        
-        If you want to get back to setuptools:
-        
-        - reinstall setuptools using its instructions.
-        
-        Lastly:
-        
-        - remove the *.OLD.* directory located in your site-packages directory if any,
-          **once you have checked everything was working correctly again**.
-        
-        -------------------------
-        Quick help for developers
-        -------------------------
-        
-        To create an egg which is compatible with Distribute, use the same
-        practice as with Setuptools, e.g.::
-        
-            from setuptools import setup
-        
-            setup(...
-            )
-        
-        To use `pkg_resources` to access data files in the egg, you should
-        require the Setuptools distribution explicitly::
-        
-            from setuptools import setup
-        
-            setup(...
-                install_requires=['setuptools']
-            )
-        
-        Only if you need Distribute-specific functionality should you depend
-        on it explicitly. In this case, replace the Setuptools dependency::
-        
-            from setuptools import setup
-        
-            setup(...
-                install_requires=['distribute']
-            )
-        
-        -----------
-        Install FAQ
-        -----------
-        
-        - **Why is Distribute wrapping my Setuptools installation?**
-        
-           Since Distribute is a fork, and since it provides the same package
-           and modules, it renames the existing Setuptools egg and inserts a
-           new one which merely wraps the Distribute code. This way, full
-           backwards compatibility is kept for packages which rely on the
-           Setuptools modules.
-        
-           At the same time, packages can meet their dependency on Setuptools
-           without actually installing it (which would disable Distribute).
-        
-        - **How does Distribute interact with virtualenv?**
-        
-          Every time you create a virtualenv it will install setuptools by default.
-          You either need to re-install Distribute in it right after or pass the
-          ``--distribute`` option when creating it.
-        
-          Once installed, your virtualenv will use Distribute transparently.
-        
-          However, if you have Setuptools installed in your system-wide Python,
-          and if the virtualenv you are in was generated without the `--no-site-packages`
-          option, the Distribute installation will stop.
-        
-          In this case you need to build a virtualenv with the `--no-site-packages`
-          option or to install `Distribute` globally.
-        
-        - **How does Distribute interact with zc.buildout?**
-        
-          You can use Distribute in your zc.buildout, with the --distribute option,
-          starting at zc.buildout 1.4.2::
-        
-          $ python bootstrap.py --distribute
-        
-          For previous zc.buildout versions, *the only thing* you need to do
-          is use the bootstrap at `http://python-distribute.org/bootstrap.py`.  Run
-          that bootstrap and ``bin/buildout`` (and all other buildout-generated
-          scripts) will transparently use distribute instead of setuptools.  You do
-          not need a specific buildout release.
-        
-          A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
-          left in place unmodified.  So other buildouts that do not yet use the new
-          bootstrap continue to work just fine.  And there is no need to list
-          ``distribute`` somewhere in your eggs: using the bootstrap is enough.
-        
-          The source code for the bootstrap script is located at
-          `http://bitbucket.org/tarek/buildout-distribute`.
-        
-        
-        
-        -----------------------------
-        Feedback and getting involved
-        -----------------------------
-        
-        - Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
-        - Issue tracker: http://bitbucket.org/tarek/distribute/issues/
-        - Code Repository: http://bitbucket.org/tarek/distribute
-        
-        =======
-        CHANGES
-        =======
-        
-        ----------
-        Unreleased
-        ----------
-        
-        + `Issue #341`_: 0.6.33 fails to build under python 2.4
-        
-        ------
-        0.6.33
-        ------
-        
-        * Fix 2 errors with Jython 2.5.
-        * Fix 1 failure with Jython 2.5 and 2.7.
-        * Disable workaround for Jython scripts on Linux systems.
-        * `Issue #336`_: `setup.py` no longer masks failure exit code when tests fail.
-        * Fix issue in pkg_resources where try/except around a platform-dependent
-          import would trigger hook load failures on Mercurial. See pull request 32
-          for details.
-        * `Issue #341`_: Fix a ResourceWarning.
-        
-        ------
-        0.6.32
-        ------
-        
-        * Fix test suite with Python 2.6.
-        * Fix some DeprecationWarnings and ResourceWarnings.
-        * `Issue #335`_: Backed out `setup_requires` superseding installed requirements
-          until regression can be addressed.
-        
-        ------
-        0.6.31
-        ------
-        
-        * `Issue #303`_: Make sure the manifest only ever contains UTF-8 in Python 3.
-        * `Issue #329`_: Properly close files created by tests for compatibility with
-          Jython.
-        * Work around Jython bugs `#1980 <http://bugs.jython.org/issue1980>`_ and
-          `#1981 <http://bugs.jython.org/issue1981>`_.
-        * `Issue #334`_: Provide workaround for packages that reference `sys.__stdout__`
-          such as numpy does. This change should address
-          `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
-          as the system encoding is UTF-8 or the IO encoding is specified in the
-          environment, i.e.::
-        
-             PYTHONIOENCODING=utf8 pip install numpy
-        
-        * Fix for encoding issue when installing from Windows executable on Python 3.
-        * `Issue #323`_: Allow `setup_requires` requirements to supersede installed
-          requirements. Added some new keyword arguments to existing pkg_resources
-          methods. Also had to update how __path__ is handled for namespace packages
-          to ensure that when a new egg distribution containing a namespace package is
-          placed on sys.path, the entries in __path__ are found in the same order they
-          would have been in had that egg been on the path when pkg_resources was
-          first imported.
-        
-        ------
-        0.6.30
-        ------
-        
-        * `Issue #328`_: Clean up temporary directories in distribute_setup.py.
-        * Fix fatal bug in distribute_setup.py.
-        
-        ------
-        0.6.29
-        ------
-        
-        * Pull Request #14: Honor file permissions in zip files.
-        * `Issue #327`_: Merged pull request #24 to fix a dependency problem with pip.
-        * Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
-        * If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
-          to produce uploadable documentation.
-        * `Issue #326`_: `upload_docs` provided mangled auth credentials under Python 3.
-        * `Issue #320`_: Fix check for "createable" in distribute_setup.py.
-        * `Issue #305`_: Remove a warning that was triggered during normal operations.
-        * `Issue #311`_: Print metadata in UTF-8 independent of platform.
-        * `Issue #303`_: Read manifest file with UTF-8 encoding under Python 3.
-        * `Issue #301`_: Allow running tests of namespace packages when using 2to3.
-        * `Issue #304`_: Prevent import loop in site.py under Python 3.3.
-        * `Issue #283`_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
-        * `Issue #299`_: The develop command didn't work on Python 3, when using 2to3,
-          as the egg link would go to the Python 2 source. Linking to the 2to3'd code
-          in build/lib makes it work, although you will have to rebuild the module
-          before testing it.
-        * `Issue #306`_: Even if 2to3 is used, we build in-place under Python 2.
-        * `Issue #307`_: Prints the full path when .svn/entries is broken.
-        * `Issue #313`_: Support for sdist subcommands (Python 2.7)
-        * `Issue #314`_: test_local_index() would fail on OS X.
-        * `Issue #310`_: Non-ascii characters in a namespace __init__.py cause errors.
-        * `Issue #218`_: Improved documentation on behavior of `package_data` and
-          `include_package_data`. Files indicated by `package_data` are now included
-          in the manifest.
-        * `distribute_setup.py` now allows a `--download-base` argument for retrieving
-          distribute from a specified location.
-        
-        ------
-        0.6.28
-        ------
-        
-        * `Issue #294`_: setup.py can now be invoked from any directory.
-        * Scripts are now installed honoring the umask.
-        * Added support for .dist-info directories.
-        * `Issue #283`_: Fix and disable scanning of `*.pyc` / `*.pyo` files on
-          Python 3.3.
-        
-        ------
-        0.6.27
-        ------
-        
-        * Support current snapshots of CPython 3.3.
-        * Distribute now recognizes README.rst as a standard, default readme file.
-        * Exclude 'encodings' modules when removing modules from sys.modules.
-          Workaround for #285.
-        * `Issue #231`_: Don't fiddle with system python when used with buildout
-          (bootstrap.py)
-        
-        ------
-        0.6.26
-        ------
-        
-        * `Issue #183`_: Symlinked files are now extracted from source distributions.
-        * `Issue #227`_: Easy_install fetch parameters are now passed during the
-          installation of a source distribution; now fulfillment of setup_requires
-          dependencies will honor the parameters passed to easy_install.
-        
-        ------
-        0.6.25
-        ------
-        
-        * `Issue #258`_: Workaround a cache issue
-        * `Issue #260`_: distribute_setup.py now accepts the --user parameter for
-          Python 2.6 and later.
-        * `Issue #262`_: package_index.open_with_auth no longer throws LookupError
-          on Python 3.
-        * `Issue #269`_: AttributeError when an exception occurs reading Manifest.in
-          on late releases of Python.
-        * `Issue #272`_: Prevent TypeError when namespace package names are unicode
-          and single-install-externally-managed is used. Also fixes PIP `issue
-          449`_.
-        * `Issue #273`_: Legacy script launchers now install with Python2/3 support.
-        
-        ------
-        0.6.24
-        ------
-        
-        * `Issue #249`_: Added options to exclude 2to3 fixers
-        
-        ------
-        0.6.23
-        ------
-        
-        * `Issue #244`_: Fixed a test
-        * `Issue #243`_: Fixed a test
-        * `Issue #239`_: Fixed a test
-        * `Issue #240`_: Fixed a test
-        * `Issue #241`_: Fixed a test
-        * `Issue #237`_: Fixed a test
-        * `Issue #238`_: easy_install now uses 64bit executable wrappers on 64bit Python
-        * `Issue #208`_: Fixed parsed_versions; it now honors post-releases as noted in the documentation
-        * `Issue #207`_: Windows cli and gui wrappers pass CTRL-C to child python process
-        * `Issue #227`_: easy_install now passes its arguments to setup.py bdist_egg
-        * `Issue #225`_: Fixed a NameError on Python 2.5, 2.4
-        
-        ------
-        0.6.21
-        ------
-        
-        * `Issue #225`_: Fixed a regression on py2.4
-        
-        ------
-        0.6.20
-        ------
-        
-        * `Issue #135`_: Include url in warning when processing URLs in package_index.
-        * `Issue #212`_: Fix issue where easy_install fails on Python 3 with the Windows installer.
-        * `Issue #213`_: Fix typo in documentation.
-        
-        ------
-        0.6.19
-        ------
-        
-        * `Issue 206`_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
-        
-        ------
-        0.6.18
-        ------
-        
-        * `Issue 210`_: Fixed a regression introduced by `Issue 204`_ fix.
-        
-        ------
-        0.6.17
-        ------
-        
-        * Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
-          variable to allow disabling installation of the easy_install-${version} script.
-        * Support Python >=3.1.4 and >=3.2.1.
-        * `Issue 204`_: Don't try to import the parent of a namespace package in
-          declare_namespace
-        * `Issue 196`_: Tolerate responses with multiple Content-Length headers
-        * `Issue 205`_: Sandboxing doesn't preserve working_set. Leads to setup_requires
-          problems.
-        
-        ------
-        0.6.16
-        ------
-        
-        * Builds sdist gztar even on Windows (avoiding `Issue 193`_).
-        * `Issue 192`_: Fixed metadata omitted on Windows when package_dir
-          specified with forward-slash.
-        * `Issue 195`_: Cython build support.
-        * `Issue 200`_: Issues with recognizing 64-bit packages on Windows.
-        
-        ------
-        0.6.15
-        ------
-        
-        * Fixed typo in bdist_egg
-        * Several issues under Python 3 have been solved.
-        * `Issue 146`_: Fixed missing DLL files after easy_install of windows exe package.
-        
-        ------
-        0.6.14
-        ------
-        
-        * `Issue 170`_: Fixed unittest failure. Thanks to Toshio.
-        * `Issue 171`_: Fixed race condition in unittests causing deadlocks in the test suite.
-        * `Issue 143`_: Fixed a lookup issue with easy_install.
-          Thanks to David and Zooko.
-        * `Issue 174`_: Fixed the edit mode when it's used with setuptools itself
-        
-        ------
-        0.6.13
-        ------
-        
-        * `Issue 160`_: 2.7 gives ValueError("Invalid IPv6 URL")
-        * `Issue 150`_: Fixed using ~/.local even in a --no-site-packages virtualenv
-        * `Issue 163`_: scan index links before external links, and don't use the md5 when
-          comparing two distributions
-        
-        ------
-        0.6.12
-        ------
-        
-        * `Issue 149`_: Fixed various failures on 2.3/2.4
-        
-        ------
-        0.6.11
-        ------
-        
-        * Found another case of SandboxViolation - fixed
-        * `Issue 15`_ and 48: Introduced a socket timeout of 15 seconds on url openings
-        * Added indexsidebar.html into MANIFEST.in
-        * `Issue 108`_: Fixed TypeError with Python3.1
-        * `Issue 121`_: Fixed --help install command trying to actually install.
-        * `Issue 112`_: Added an os.makedirs so that Tarek's solution will work.
-        * `Issue 133`_: Added --no-find-links to easy_install
-        * Added easy_install --user
-        * `Issue 100`_: Fixed develop --user not taking '.' in PYTHONPATH into account
-        * `Issue 134`_: removed spurious UserWarnings. Patch by VanLindberg
-        * `Issue 138`_: cant_write_to_target error when setup_requires is used.
-        * `Issue 147`_: respect the sys.dont_write_bytecode flag
-        
-        ------
-        0.6.10
-        ------
-        
-        * Reverted change made for the DistributionNotFound exception because
-          zc.buildout uses the exception message to get the name of the
-          distribution.
-        
-        -----
-        0.6.9
-        -----
-        
-        * `Issue 90`_: unknown setuptools version can be added in the working set
-        * `Issue 87`_: setup.py doesn't try to convert distribute_setup.py anymore.
-          Initial patch by arfrever.
-        * `Issue 89`_: added a side bar with a download link to the doc.
-        * `Issue 86`_: fixed missing sentence in pkg_resources doc.
-        * Added a nicer error message when a DistributionNotFound is raised.
-        * `Issue 80`_: test_develop now works with Python 3.1
-        * `Issue 93`_: upload_docs now works if there is an empty sub-directory.
-        * `Issue 70`_: exec bit on non-exec files
-        * `Issue 99`_: now the standalone easy_install command doesn't use a
-          "setup.cfg" if any exists in the working directory. It will use it
-          only if triggered by ``install_requires`` from a setup.py call
-          (install, develop, etc).
-        * `Issue 101`_: Allowing ``os.devnull`` in Sandbox
-        * `Issue 92`_: Fixed the "no eggs" found error with MacPort
-          (platform.mac_ver() fails)
-        * `Issue 103`_: test_get_script_header_jython_workaround not run
-          anymore under py3 with C or POSIX locale. Contributed by Arfrever.
-        * `Issue 104`_: removed the assertion when the installation fails,
-          with a nicer message for the end user.
-        * `Issue 100`_: making sure there's no SandboxViolation when
-          the setup script patches setuptools.
-        
-        -----
-        0.6.8
-        -----
-        
-        * Added "check_packages" in dist. (added in Setuptools 0.6c11)
-        * Fixed the DONT_PATCH_SETUPTOOLS state.
-        
-        -----
-        0.6.7
-        -----
-        
-        * `Issue 58`_: Added --user support to the develop command
-        * `Issue 11`_: Generated scripts now wrap their call to the script entry point
-          in the standard "if __name__ == '__main__'"
-        * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
-          can drive an installation that doesn't patch a global setuptools.
-        * Reviewed unladen-swallow specific change from
-          http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
-          and determined that it no longer applies. Distribute should work fine with
-          Unladen Swallow 2009Q3.
-        * `Issue 21`_: Allow PackageIndex.open_url to gracefully handle all cases of a
-          httplib.HTTPException instead of just InvalidURL and BadStatusLine.
-        * Removed virtual-python.py from this distribution and updated documentation
-          to point to the actively maintained virtualenv instead.
-        * `Issue 64`_: use_setuptools no longer rebuilds the distribute egg every
-          time it is run
-        * use_setuptools now properly respects the requested version
-        * use_setuptools will no longer try to import a distribute egg for the
-          wrong Python version
-        * `Issue 74`_: no_fake should be True by default.
-        * `Issue 72`_: avoid a bootstrapping issue with easy_install -U
-        
-        -----
-        0.6.6
-        -----
-        
-        * Unified the bootstrap file so it works on both py2.x and py3k without 2to3
-          (patch by Holger Krekel)
-        
-        -----
-        0.6.5
-        -----
-        
-        * `Issue 65`_: cli.exe and gui.exe are now generated at build time,
-          depending on the platform in use.
-        
-        * `Issue 67`_: Fixed doc typo (PEP 381/382)
-        
-        * Distribute no longer shadows setuptools if we require a 0.7-series
-          setuptools.  And an error is raised when installing a 0.7 setuptools with
-          distribute.
-        
-        * When run from within buildout, no attempt is made to modify an existing
-          setuptools egg, whether in a shared egg directory or a system setuptools.
-        
-        * Fixed a hole in sandboxing allowing builtin file to write outside of
-          the sandbox.
-        
-        -----
-        0.6.4
-        -----
-        
-        * Added the generation of `distribute_setup_3k.py` during the release.
-          This closes `issue #52`_.
-        
-        * Added an upload_docs command to easily upload project documentation to
-          PyPI's http://packages.python.org. This closes `issue #56`_.
-        
-        * Fixed a bootstrap bug on the use_setuptools() API.
-        
-        -----
-        0.6.3
-        -----
-        
-        setuptools
-        ==========
-        
-        * Fixed a bunch of calls to file() that caused crashes on Python 3.
-        
-        bootstrapping
-        =============
-        
-        * Fixed a bug in sorting that caused bootstrap to fail on Python 3.
-        
-        -----
-        0.6.2
-        -----
-        
-        setuptools
-        ==========
-        
-        * Added Python 3 support; see docs/python3.txt.
-          This closes http://bugs.python.org/setuptools/issue39.
-        
-        * Added option to run 2to3 automatically when installing on Python 3.
-          This closes `issue #31`_.
-        
-        * Fixed invalid usage of requirement.parse, which broke develop -d.
-          This closes http://bugs.python.org/setuptools/issue44.
-        
-        * Fixed script launcher for 64-bit Windows.
-          This closes http://bugs.python.org/setuptools/issue2.
-        
-        * Fixed KeyError when compiling extensions.
-          This closes http://bugs.python.org/setuptools/issue41.
-        
-        bootstrapping
-        =============
-        
-        * Fixed bootstrap not working on Windows. This closes `issue #49`_.
-        
-        * Fixed 2.6 dependencies. This closes `issue #50`_.
-        
-        * Make sure setuptools is patched when running through easy_install.
-          This closes http://bugs.python.org/setuptools/issue40.
-        
-        -----
-        0.6.1
-        -----
-        
-        setuptools
-        ==========
-        
-        * package_index.urlopen now catches BadStatusLine and malformed url errors.
-          This closes `issue #16`_ and `issue #18`_.
-        
-        * zip_ok is now False by default. This closes
-          http://bugs.python.org/setuptools/issue33.
-        
-        * Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
-        
-        * Fixed invalid bootstrapping with easy_install installation (`issue #40`_).
-          Thanks to Florian Schulze for the help.
-        
-        * Removed buildout/bootstrap.py. A new repository will create a specific
-          bootstrap.py script.
-        
-        
-        bootstrapping
-        =============
-        
-        * The bootstrap process leaves setuptools alone if it is detected on the system
-          and --root or --prefix is provided, but is not in the same location.
-          This closes `issue #10`_.
-        
-        ---
-        0.6
-        ---
-        
-        setuptools
-        ==========
-        
-        * Packages required at build time were not fully present at install time.
-          This closes `issue #12`_.
-        
-        * Protected against failures in tarfile extraction. This closes `issue #10`_.
-        
-        * Made Jython api_tests.txt doctest compatible. This closes `issue #7`_.
-        
-        * sandbox.py replaced builtin type file with builtin function open. This
-          closes `issue #6`_.
-        
-        * Immediately close all file handles. This closes `issue #3`_.
-        
-        * Added compatibility with Subversion 1.6. This references `issue #1`_.
-        
-        pkg_resources
-        =============
-        
-        * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
-          instead. Based on a patch from ronaldoussoren. This closes `issue #5`_.
-        
-        * Fixed a SandboxViolation for mkdir that could occur in certain cases.
-          This closes `issue #13`_.
-        
-        * Allow find_on_path on systems with tight permissions to fail gracefully.
-          This closes `issue #9`_.
-        
-        * Corrected inconsistency between documentation and code of add_entry.
-          This closes `issue #8`_.
-        
-        * Immediately close all file handles. This closes `issue #3`_.
-        
-        easy_install
-        ============
-        
-        * Immediately close all file handles. This closes `issue #3`_.
-        
-        
-        .. _`Issue #135`: http://bitbucket.org/tarek/distribute/issue/135
-        .. _`Issue #183`: http://bitbucket.org/tarek/distribute/issue/183
-        .. _`Issue #207`: http://bitbucket.org/tarek/distribute/issue/207
-        .. _`Issue #208`: http://bitbucket.org/tarek/distribute/issue/208
-        .. _`Issue #212`: http://bitbucket.org/tarek/distribute/issue/212
-        .. _`Issue #213`: http://bitbucket.org/tarek/distribute/issue/213
-        .. _`Issue #218`: http://bitbucket.org/tarek/distribute/issue/218
-        .. _`Issue #225`: http://bitbucket.org/tarek/distribute/issue/225
-        .. _`Issue #227`: http://bitbucket.org/tarek/distribute/issue/227
-        .. _`Issue #231`: http://bitbucket.org/tarek/distribute/issue/231
-        .. _`Issue #237`: http://bitbucket.org/tarek/distribute/issue/237
-        .. _`Issue #238`: http://bitbucket.org/tarek/distribute/issue/238
-        .. _`Issue #239`: http://bitbucket.org/tarek/distribute/issue/239
-        .. _`Issue #240`: http://bitbucket.org/tarek/distribute/issue/240
-        .. _`Issue #241`: http://bitbucket.org/tarek/distribute/issue/241
-        .. _`Issue #243`: http://bitbucket.org/tarek/distribute/issue/243
-        .. _`Issue #244`: http://bitbucket.org/tarek/distribute/issue/244
-        .. _`Issue #249`: http://bitbucket.org/tarek/distribute/issue/249
-        .. _`Issue #258`: http://bitbucket.org/tarek/distribute/issue/258
-        .. _`Issue #260`: http://bitbucket.org/tarek/distribute/issue/260
-        .. _`Issue #262`: http://bitbucket.org/tarek/distribute/issue/262
-        .. _`Issue #269`: http://bitbucket.org/tarek/distribute/issue/269
-        .. _`Issue #272`: http://bitbucket.org/tarek/distribute/issue/272
-        .. _`Issue #273`: http://bitbucket.org/tarek/distribute/issue/273
-        .. _`Issue #283`: http://bitbucket.org/tarek/distribute/issue/283
-        .. _`Issue #294`: http://bitbucket.org/tarek/distribute/issue/294
-        .. _`Issue #299`: http://bitbucket.org/tarek/distribute/issue/299
-        .. _`Issue #301`: http://bitbucket.org/tarek/distribute/issue/301
-        .. _`Issue #303`: http://bitbucket.org/tarek/distribute/issue/303
-        .. _`Issue #304`: http://bitbucket.org/tarek/distribute/issue/304
-        .. _`Issue #305`: http://bitbucket.org/tarek/distribute/issue/305
-        .. _`Issue #306`: http://bitbucket.org/tarek/distribute/issue/306
-        .. _`Issue #307`: http://bitbucket.org/tarek/distribute/issue/307
-        .. _`Issue #310`: http://bitbucket.org/tarek/distribute/issue/310
-        .. _`Issue #311`: http://bitbucket.org/tarek/distribute/issue/311
-        .. _`Issue #313`: http://bitbucket.org/tarek/distribute/issue/313
-        .. _`Issue #314`: http://bitbucket.org/tarek/distribute/issue/314
-        .. _`Issue #320`: http://bitbucket.org/tarek/distribute/issue/320
-        .. _`Issue #323`: http://bitbucket.org/tarek/distribute/issue/323
-        .. _`Issue #326`: http://bitbucket.org/tarek/distribute/issue/326
-        .. _`Issue #327`: http://bitbucket.org/tarek/distribute/issue/327
-        .. _`Issue #328`: http://bitbucket.org/tarek/distribute/issue/328
-        .. _`Issue #329`: http://bitbucket.org/tarek/distribute/issue/329
-        .. _`Issue #334`: http://bitbucket.org/tarek/distribute/issue/334
-        .. _`Issue #335`: http://bitbucket.org/tarek/distribute/issue/335
-        .. _`Issue #336`: http://bitbucket.org/tarek/distribute/issue/336
-        .. _`Issue #341`: http://bitbucket.org/tarek/distribute/issue/341
-        .. _`Issue 100`: http://bitbucket.org/tarek/distribute/issue/100
-        .. _`Issue 101`: http://bitbucket.org/tarek/distribute/issue/101
-        .. _`Issue 103`: http://bitbucket.org/tarek/distribute/issue/103
-        .. _`Issue 104`: http://bitbucket.org/tarek/distribute/issue/104
-        .. _`Issue 108`: http://bitbucket.org/tarek/distribute/issue/108
-        .. _`Issue 11`: http://bitbucket.org/tarek/distribute/issue/11
-        .. _`Issue 112`: http://bitbucket.org/tarek/distribute/issue/112
-        .. _`Issue 121`: http://bitbucket.org/tarek/distribute/issue/121
-        .. _`Issue 133`: http://bitbucket.org/tarek/distribute/issue/133
-        .. _`Issue 134`: http://bitbucket.org/tarek/distribute/issue/134
-        .. _`Issue 138`: http://bitbucket.org/tarek/distribute/issue/138
-        .. _`Issue 143`: http://bitbucket.org/tarek/distribute/issue/143
-        .. _`Issue 146`: http://bitbucket.org/tarek/distribute/issue/146
-        .. _`Issue 147`: http://bitbucket.org/tarek/distribute/issue/147
-        .. _`Issue 149`: http://bitbucket.org/tarek/distribute/issue/149
-        .. _`Issue 15`: http://bitbucket.org/tarek/distribute/issue/15
-        .. _`Issue 150`: http://bitbucket.org/tarek/distribute/issue/150
-        .. _`Issue 160`: http://bitbucket.org/tarek/distribute/issue/160
-        .. _`Issue 163`: http://bitbucket.org/tarek/distribute/issue/163
-        .. _`Issue 170`: http://bitbucket.org/tarek/distribute/issue/170
-        .. _`Issue 171`: http://bitbucket.org/tarek/distribute/issue/171
-        .. _`Issue 174`: http://bitbucket.org/tarek/distribute/issue/174
-        .. _`Issue 192`: http://bitbucket.org/tarek/distribute/issue/192
-        .. _`Issue 193`: http://bitbucket.org/tarek/distribute/issue/193
-        .. _`Issue 195`: http://bitbucket.org/tarek/distribute/issue/195
-        .. _`Issue 196`: http://bitbucket.org/tarek/distribute/issue/196
-        .. _`Issue 200`: http://bitbucket.org/tarek/distribute/issue/200
-        .. _`Issue 204`: http://bitbucket.org/tarek/distribute/issue/204
-        .. _`Issue 205`: http://bitbucket.org/tarek/distribute/issue/205
-        .. _`Issue 206`: http://bitbucket.org/tarek/distribute/issue/206
-        .. _`Issue 21`: http://bitbucket.org/tarek/distribute/issue/21
-        .. _`Issue 210`: http://bitbucket.org/tarek/distribute/issue/210
-        .. _`Issue 58`: http://bitbucket.org/tarek/distribute/issue/58
-        .. _`Issue 64`: http://bitbucket.org/tarek/distribute/issue/64
-        .. _`Issue 65`: http://bitbucket.org/tarek/distribute/issue/65
-        .. _`Issue 67`: http://bitbucket.org/tarek/distribute/issue/67
-        .. _`Issue 70`: http://bitbucket.org/tarek/distribute/issue/70
-        .. _`Issue 72`: http://bitbucket.org/tarek/distribute/issue/72
-        .. _`Issue 74`: http://bitbucket.org/tarek/distribute/issue/74
-        .. _`Issue 80`: http://bitbucket.org/tarek/distribute/issue/80
-        .. _`Issue 86`: http://bitbucket.org/tarek/distribute/issue/86
-        .. _`Issue 87`: http://bitbucket.org/tarek/distribute/issue/87
-        .. _`Issue 89`: http://bitbucket.org/tarek/distribute/issue/89
-        .. _`Issue 90`: http://bitbucket.org/tarek/distribute/issue/90
-        .. _`Issue 92`: http://bitbucket.org/tarek/distribute/issue/92
-        .. _`Issue 93`: http://bitbucket.org/tarek/distribute/issue/93
-        .. _`Issue 99`: http://bitbucket.org/tarek/distribute/issue/99
-        .. _`issue
-          449`: http://bitbucket.org/tarek/distribute/issue/449
-        .. _`issue #1`: http://bitbucket.org/tarek/distribute/issue/1
-        .. _`issue #10`: http://bitbucket.org/tarek/distribute/issue/10
-        .. _`issue #12`: http://bitbucket.org/tarek/distribute/issue/12
-        .. _`issue #13`: http://bitbucket.org/tarek/distribute/issue/13
-        .. _`issue #16`: http://bitbucket.org/tarek/distribute/issue/16
-        .. _`issue #18`: http://bitbucket.org/tarek/distribute/issue/18
-        .. _`issue #3`: http://bitbucket.org/tarek/distribute/issue/3
-        .. _`issue #31`: http://bitbucket.org/tarek/distribute/issue/31
-        .. _`issue #40`: http://bitbucket.org/tarek/distribute/issue/40
-        .. _`issue #49`: http://bitbucket.org/tarek/distribute/issue/49
-        .. _`issue #5`: http://bitbucket.org/tarek/distribute/issue/5
-        .. _`issue #50`: http://bitbucket.org/tarek/distribute/issue/50
-        .. _`issue #52`: http://bitbucket.org/tarek/distribute/issue/52
-        .. _`issue #56`: http://bitbucket.org/tarek/distribute/issue/56
-        .. _`issue #6`: http://bitbucket.org/tarek/distribute/issue/6
-        .. _`issue #7`: http://bitbucket.org/tarek/distribute/issue/7
-        .. _`issue #8`: http://bitbucket.org/tarek/distribute/issue/8
-        .. _`issue #9`: http://bitbucket.org/tarek/distribute/issue/9
-        .. _`issue1980`: http://bitbucket.org/tarek/distribute/issue/1980
-        .. _`issue1981`: http://bitbucket.org/tarek/distribute/issue/1981
-        .. _`issue2`: http://bitbucket.org/tarek/distribute/issue/2
-        .. _`issue20`: http://bitbucket.org/tarek/distribute/issue/20
-        .. _`issue33`: http://bitbucket.org/tarek/distribute/issue/33
-        .. _`issue39`: http://bitbucket.org/tarek/distribute/issue/39
-        .. _`issue40`: http://bitbucket.org/tarek/distribute/issue/40
-        .. _`issue41`: http://bitbucket.org/tarek/distribute/issue/41
-        .. _`issue44`: http://bitbucket.org/tarek/distribute/issue/44
-        
-        
-Keywords: CPAN PyPI distutils eggs package management
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: License :: OSI Approved :: Zope Public License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2.4
-Classifier: Programming Language :: Python :: 2.5
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.1
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: System :: Archiving :: Packaging
-Classifier: Topic :: System :: Systems Administration
-Classifier: Topic :: Utilities
diff --git a/vendor/distribute-0.6.34/README.txt b/vendor/distribute-0.6.34/README.txt
deleted file mode 100644
index 0603ba47e61f19e5149df62cc84351a7def7aa62..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/README.txt
+++ /dev/null
@@ -1,228 +0,0 @@
-===============================
-Installing and Using Distribute
-===============================
-
-.. contents:: **Table of Contents**
-
------------
-Disclaimers
------------
-
-About the fork
-==============
-
-`Distribute` is a fork of the `Setuptools` project.
-
-Distribute is intended to replace Setuptools as the standard method
-for working with Python module distributions.
-
-The fork has two goals:
-
-- Providing a backward compatible version to replace Setuptools
-  and make all distributions that depend on Setuptools work as
-  before, but with fewer bugs and behavioral issues.
-
-  This work is done in the 0.6.x series.
-
-  Starting with version 0.6.2, Distribute supports Python 3.
-  Installing and using distribute for Python 3 code works exactly
-  the same as for Python 2 code, but Distribute also helps you to support
-  Python 2 and Python 3 from the same source code by letting you run 2to3
-  on the code as a part of the build process, by setting the keyword parameter
-  ``use_2to3`` to True. See http://packages.python.org/distribute for more
-  information.
-
-- Refactoring the code, and releasing it in several distributions.
-  This work is being done in the 0.7.x series but not yet released.
-
-The roadmap is still evolving, and the page that is up-to-date is
-located at `http://packages.python.org/distribute/roadmap`.
-
-If you install `Distribute` and want to switch back to `Setuptools` for any
-reason, see the `Uninstallation instructions`_ section.
-
-More documentation
-==================
-
-You can get more information in the Sphinx-based documentation, located
-at http://packages.python.org/distribute. This documentation includes the old
-Setuptools documentation, which is gradually being replaced, as well as brand
-new content.
-
-About the installation process
-==============================
-
-The `Distribute` installer modifies your installation by de-activating an
-existing installation of `Setuptools` in a bootstrap process. This process
-has been tested in various installation schemes and contexts but in case of a
-bug during this process your Python installation might be left in a broken
-state. Since all modified files and directories are copied before the
-installation starts, you will be able to get back to a normal state by reading
-the instructions in the `Uninstallation instructions`_ section.
-
-In any case, it is recommended to save your `site-packages` directory before
-you start the installation of `Distribute`.
-
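-For example, on a POSIX system you could set the directory aside first (the
-exact path depends on your Python installation, so adjust it as needed)::
-
-    $ SITE=$(python -c "import distutils.sysconfig as s; print(s.get_python_lib())")
-    $ cp -a "$SITE" "$SITE.backup"
-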
--------------------------
-Installation Instructions
--------------------------
-
-Distribute is only released as a source distribution.
-
-It can be installed using pip, either from the source tarball
-or by using the ``distribute_setup.py`` script provided online.
-
-``distribute_setup.py`` is the simplest and preferred way on all systems.
-
-distribute_setup.py
-===================
-
-Download
-`distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
-and execute it, using the Python interpreter of your choice.
-
-If your shell has the ``curl`` program you can do::
-
-    $ curl -O http://python-distribute.org/distribute_setup.py
-    $ python distribute_setup.py
-
-Notice this file is also provided in the source release.
-
-pip
-===
-
-Run easy_install or pip::
-
-    $ pip install distribute
-
-Source installation
-===================
-
-Download the source tarball, uncompress it, then run the install command::
-
-    $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.34.tar.gz
-    $ tar -xzvf distribute-0.6.34.tar.gz
-    $ cd distribute-0.6.34
-    $ python setup.py install
-
----------------------------
-Uninstallation Instructions
----------------------------
-
-Like other distutils-based distributions, Distribute doesn't provide an
-uninstaller yet. It's all done manually! We are all waiting for PEP 376
-support in Python.
-
-Distribute is installed in three steps:
-
-1. it moves any existing installation of Setuptools out of the way
-2. it installs a `fake` setuptools installation
-3. it installs distribute
-
-Distribute can be removed like this:
-
-- remove the ``distribute*.egg`` file located in your site-packages directory
-- remove the ``setuptools.pth`` file located in your site-packages directory
-- remove the easy_install script located in your ``sys.prefix/bin`` directory
-- remove the ``setuptools*.egg`` directory located in your site-packages directory,
-  if any.
-
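-For example, assuming a system-wide installation on a typical POSIX layout
-(your paths may differ; double-check each path before deleting anything)::
-
-    $ cd /usr/lib/python2.7/site-packages
-    $ rm -rf distribute-*.egg setuptools.pth setuptools-*.egg
-    $ rm "$(python -c 'import sys; print(sys.prefix)')"/bin/easy_install*
-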
-If you want to get back to setuptools:
-
-- reinstall setuptools by following its installation instructions.
-
-Lastly:
-
-- remove the *.OLD.* directory located in your site-packages directory if any,
-  **once you have checked everything was working correctly again**.
-
--------------------------
-Quick help for developers
--------------------------
-
-To create an egg which is compatible with Distribute, use the same
-practice as with Setuptools, e.g.::
-
-    from setuptools import setup
-
-    setup(...
-    )
-
-To use `pkg_resources` to access data files in the egg, you should
-require the Setuptools distribution explicitly::
-
-    from setuptools import setup
-
-    setup(...
-        install_requires=['setuptools']
-    )
-
-Only if you need Distribute-specific functionality should you depend
-on it explicitly. In this case, replace the Setuptools dependency::
-
-    from setuptools import setup
-
-    setup(...
-        install_requires=['distribute']
-    )
-
------------
-Install FAQ
------------
-
-- **Why is Distribute wrapping my Setuptools installation?**
-
-   Since Distribute is a fork, and since it provides the same package
-   and modules, it renames the existing Setuptools egg and inserts a
-   new one which merely wraps the Distribute code. This way, full
-   backwards compatibility is kept for packages which rely on the
-   Setuptools modules.
-
-   At the same time, packages can meet their dependency on Setuptools
-   without actually installing it (which would disable Distribute).
-
-- **How does Distribute interact with virtualenv?**
-
-  Every time you create a virtualenv, it will install setuptools by default.
-  You either need to re-install Distribute in it right after or pass the
-  ``--distribute`` option when creating it.
-
-  Once installed, your virtualenv will use Distribute transparently.
-
-  However, if you have Setuptools installed in your system-wide Python,
-  and if the virtualenv you are in was generated without the `--no-site-packages`
-  option, the Distribute installation will stop.
-
-  In this case you need to build a virtualenv with the `--no-site-packages`
-  option or to install `Distribute` globally.
-
-- **How does Distribute interact with zc.buildout?**
-
-  You can use Distribute in your zc.buildout, with the --distribute option,
-  starting at zc.buildout 1.4.2::
-
-      $ python bootstrap.py --distribute
-
-  For previous zc.buildout versions, *the only thing* you need to do
-  is use the bootstrap at `http://python-distribute.org/bootstrap.py`.  Run
-  that bootstrap and ``bin/buildout`` (and all other buildout-generated
-  scripts) will transparently use distribute instead of setuptools.  You do
-  not need a specific buildout release.
-
-  A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
-  left in place unmodified.  So other buildouts that do not yet use the new
-  bootstrap continue to work just fine.  And there is no need to list
-  ``distribute`` somewhere in your eggs: using the bootstrap is enough.
-
-  The source code for the bootstrap script is located at
-  `http://bitbucket.org/tarek/buildout-distribute`.
-
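-  In practice, switching an existing buildout that still uses the old
-  bootstrap might look like this (using the URL mentioned above)::
-
-      $ curl -O http://python-distribute.org/bootstrap.py
-      $ python bootstrap.py
-      $ bin/buildout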
-
-
------------------------------
-Feedback and getting involved
------------------------------
-
-- Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
-- Issue tracker: http://bitbucket.org/tarek/distribute/issues/
-- Code Repository: http://bitbucket.org/tarek/distribute
-
diff --git a/vendor/distribute-0.6.34/_markerlib/__init__.py b/vendor/distribute-0.6.34/_markerlib/__init__.py
deleted file mode 100644
index e2b237b1f6444537f1243a8027dc19a0d8bc7b74..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/_markerlib/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-try:
-    import ast
-    from _markerlib.markers import default_environment, compile, interpret
-except ImportError:
-    # Graceful fallback for interpreters without the `ast` module
-    # (e.g. Python 2.5 and earlier).
-    if 'ast' in globals():
-        # `ast` imported fine, so the failure came from _markerlib.markers
-        # itself; re-raise rather than mask a real bug.
-        raise
-    def default_environment():
-        return {}
-    def compile(marker):
-        def marker_fn(environment=None, override=None):
-            # 'empty markers are True' heuristic won't install extra deps.
-            return not marker.strip()
-        marker_fn.__doc__ = marker
-        return marker_fn
-    def interpret(marker, environment=None, override=None):
-        return compile(marker)()
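-
-# Illustrative only, not part of the original module: with the fallback
-# implementation above, any non-empty marker evaluates to False, so optional
-# dependencies guarded by markers are simply skipped on old interpreters.
-#
-#     from _markerlib import interpret
-#     interpret("")                         # True: no marker, always applies
-#     interpret("python_version >= '2.6'")  # False under the fallback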
diff --git a/vendor/distribute-0.6.34/_markerlib/markers.py b/vendor/distribute-0.6.34/_markerlib/markers.py
deleted file mode 100644
index c93d7f3b671f8234de9a08e68d5f1d194b507c55..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/_markerlib/markers.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Interpret PEP 345 environment markers.
-
-EXPR [in|==|!=|not in] EXPR [or|and] ...
-
-where EXPR belongs to any of those:
-
-    python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
-    python_full_version = sys.version.split()[0]
-    os.name = os.name
-    sys.platform = sys.platform
-    platform.version = platform.version()
-    platform.machine = platform.machine()
-    platform.python_implementation = platform.python_implementation()
-    a free string, like '2.6', or 'win32'
-"""
-
-__all__ = ['default_environment', 'compile', 'interpret']
-
-import ast
-import os
-import platform
-import sys
-import weakref
-
-_builtin_compile = compile
-
-try:
-    from platform import python_implementation
-except ImportError:
-    if os.name == "java":
-        # Jython 2.5 has ast module, but not platform.python_implementation() function.
-        def python_implementation():
-            return "Jython"
-    else:
-        raise
-
-
-# restricted set of variables
-_VARS = {'sys.platform': sys.platform,
-         'python_version': '%s.%s' % sys.version_info[:2],
-         # FIXME parsing sys.platform is not reliable, but there is no other
-         # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
-         'python_full_version': sys.version.split(' ', 1)[0],
-         'os.name': os.name,
-         'platform.version': platform.version(),
-         'platform.machine': platform.machine(),
-         'platform.python_implementation': python_implementation(),
-         'extra': None # wheel extension
-        }
-
-def default_environment():
-    """Return copy of default PEP 385 globals dictionary."""
-    return dict(_VARS)
-
-class ASTWhitelist(ast.NodeTransformer):
-    def __init__(self, statement):
-        self.statement = statement # for error messages
-
-    ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
-    # Bool operations
-    ALLOWED += (ast.And, ast.Or)
-    # Comparison operations
-    ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
-
-    def visit(self, node):
-        """Ensure statement only contains allowed nodes."""
-        if not isinstance(node, self.ALLOWED):
-            raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
-                               (self.statement,
-                               (' ' * node.col_offset) + '^'))
-        return ast.NodeTransformer.visit(self, node)
-
-    def visit_Attribute(self, node):
-        """Flatten one level of attribute access."""
-        new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
-        return ast.copy_location(new_node, node)
-
-def parse_marker(marker):
-    tree = ast.parse(marker, mode='eval')
-    new_tree = ASTWhitelist(marker).generic_visit(tree)
-    return new_tree
-
-def compile_marker(parsed_marker):
-    return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
-                   dont_inherit=True)
-
-_cache = weakref.WeakValueDictionary()
-
-def compile(marker):
-    """Return compiled marker as a function accepting an environment dict."""
-    try:
-        return _cache[marker]
-    except KeyError:
-        pass
-    if not marker.strip():
-        def marker_fn(environment=None, override=None):
-            """"""
-            return True
-    else:
-        compiled_marker = compile_marker(parse_marker(marker))
-        def marker_fn(environment=None, override=None):
-            """override updates environment"""
-            if override is None:
-                override = {}
-            if environment is None:
-                environment = default_environment()
-            environment.update(override)
-            return eval(compiled_marker, environment)
-    marker_fn.__doc__ = marker
-    _cache[marker] = marker_fn
-    return _cache[marker]
-
-def interpret(marker, environment=None):
-    return compile(marker)(environment)
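-
-# Usage sketch (illustrative, not part of the original module): `interpret`
-# parses the marker with `ast`, rejects any node type outside the whitelist
-# above, flattens dotted names such as os.name into single dictionary
-# lookups, and evaluates the result against the default environment:
-#
-#     from _markerlib import default_environment, interpret
-#     interpret("python_version >= '2.4' and os.name == 'posix'")
-#     env = default_environment()
-#     env['extra'] = 'tests'
-#     interpret("extra == 'tests'", env)    # True only when the extra matches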
diff --git a/vendor/distribute-0.6.34/distribute.egg-info/PKG-INFO b/vendor/distribute-0.6.34/distribute.egg-info/PKG-INFO
deleted file mode 100644
index d1c8adfed70b975573182f460a62fd11f6d58613..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute.egg-info/PKG-INFO
+++ /dev/null
@@ -1,868 +0,0 @@
-Metadata-Version: 1.1
-Name: distribute
-Version: 0.6.34
-Summary: Easily download, build, install, upgrade, and uninstall Python packages
-Home-page: http://packages.python.org/distribute
-Author: The fellowship of the packaging
-Author-email: distutils-sig@python.org
-License: PSF or ZPL
-Description: ===============================
-        Installing and Using Distribute
-        ===============================
-        
-        .. contents:: **Table of Contents**
-        
-        -----------
-        Disclaimers
-        -----------
-        
-        About the fork
-        ==============
-        
-        `Distribute` is a fork of the `Setuptools` project.
-        
-        Distribute is intended to replace Setuptools as the standard method
-        for working with Python module distributions.
-        
-        The fork has two goals:
-        
-        - Providing a backward compatible version to replace Setuptools
-          and make all distributions that depend on Setuptools work as
-          before, but with fewer bugs and behavioral issues.
-        
-          This work is done in the 0.6.x series.
-        
-          Starting with version 0.6.2, Distribute supports Python 3.
-          Installing and using distribute for Python 3 code works exactly
-          the same as for Python 2 code, but Distribute also helps you to support
-          Python 2 and Python 3 from the same source code by letting you run 2to3
-          on the code as a part of the build process, by setting the keyword parameter
-          ``use_2to3`` to True. See http://packages.python.org/distribute for more
-          information.
-        
-        - Refactoring the code, and releasing it in several distributions.
-          This work is being done in the 0.7.x series but not yet released.
-        
-        The roadmap is still evolving, and the page that is up-to-date is
-        located at `http://packages.python.org/distribute/roadmap`.
-        
-        If you install `Distribute` and want to switch back to `Setuptools` for any
-        reason, see the `Uninstallation instructions`_ section.
-        
-        More documentation
-        ==================
-        
-        You can get more information in the Sphinx-based documentation, located
-        at http://packages.python.org/distribute. This documentation includes the old
-        Setuptools documentation, which is gradually being replaced, as well as brand
-        new content.
-        
-        About the installation process
-        ==============================
-        
-        The `Distribute` installer modifies your installation by de-activating an
-        existing installation of `Setuptools` in a bootstrap process. This process
-        has been tested in various installation schemes and contexts but in case of a
-        bug during this process your Python installation might be left in a broken
-        state. Since all modified files and directories are copied before the
-        installation starts, you will be able to get back to a normal state by reading
-        the instructions in the `Uninstallation instructions`_ section.
-        
-        In any case, it is recommended to save your `site-packages` directory before
-        you start the installation of `Distribute`.
-        
-        -------------------------
-        Installation Instructions
-        -------------------------
-        
-        Distribute is only released as a source distribution.
-        
-        It can be installed using pip, either from the source tarball
-        or by using the ``distribute_setup.py`` script provided online.
-        
-        ``distribute_setup.py`` is the simplest and preferred way on all systems.
-        
-        distribute_setup.py
-        ===================
-        
-        Download
-        `distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
-        and execute it, using the Python interpreter of your choice.
-        
-        If your shell has the ``curl`` program you can do::
-        
-            $ curl -O http://python-distribute.org/distribute_setup.py
-            $ python distribute_setup.py
-        
-        Notice this file is also provided in the source release.
-        
-        pip
-        ===
-        
-        Run easy_install or pip::
-        
-            $ pip install distribute
-        
-        Source installation
-        ===================
-        
-        Download the source tarball, uncompress it, then run the install command::
-        
-            $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.34.tar.gz
-            $ tar -xzvf distribute-0.6.34.tar.gz
-            $ cd distribute-0.6.34
-            $ python setup.py install
-        
-        ---------------------------
-        Uninstallation Instructions
-        ---------------------------
-        
-        Like other distutils-based distributions, Distribute doesn't provide an
-        uninstaller yet. It's all done manually! We are all waiting for PEP 376
-        support in Python.
-        
-        Distribute is installed in three steps:
-        
-        1. it moves any existing installation of Setuptools out of the way
-        2. it installs a `fake` setuptools installation
-        3. it installs distribute
-        
-        Distribute can be removed like this:
-        
-        - remove the ``distribute*.egg`` file located in your site-packages directory
-        - remove the ``setuptools.pth`` file located in your site-packages directory
-        - remove the easy_install script located in your ``sys.prefix/bin`` directory
-        - remove the ``setuptools*.egg`` directory located in your site-packages directory,
-          if any.
-        
-        If you want to get back to setuptools:
-        
-        - reinstall setuptools by following its installation instructions.
-        
-        Lastly:
-        
-        - remove the *.OLD.* directory located in your site-packages directory if any,
-          **once you have checked everything was working correctly again**.
-        
-        -------------------------
-        Quick help for developers
-        -------------------------
-        
-        To create an egg which is compatible with Distribute, use the same
-        practice as with Setuptools, e.g.::
-        
-            from setuptools import setup
-        
-            setup(...
-            )
-        
-        To use `pkg_resources` to access data files in the egg, you should
-        require the Setuptools distribution explicitly::
-        
-            from setuptools import setup
-        
-            setup(...
-                install_requires=['setuptools']
-            )
-        
-        Only if you need Distribute-specific functionality should you depend
-        on it explicitly. In this case, replace the Setuptools dependency::
-        
-            from setuptools import setup
-        
-            setup(...
-                install_requires=['distribute']
-            )
-        
-        -----------
-        Install FAQ
-        -----------
-        
-        - **Why is Distribute wrapping my Setuptools installation?**
-        
-           Since Distribute is a fork, and since it provides the same package
-           and modules, it renames the existing Setuptools egg and inserts a
-           new one which merely wraps the Distribute code. This way, full
-           backwards compatibility is kept for packages which rely on the
-           Setuptools modules.
-        
-           At the same time, packages can meet their dependency on Setuptools
-           without actually installing it (which would disable Distribute).
-        
-        - **How does Distribute interact with virtualenv?**
-        
-          Every time you create a virtualenv, it will install setuptools by default.
-          You either need to re-install Distribute in it right after or pass the
-          ``--distribute`` option when creating it.
-        
-          Once installed, your virtualenv will use Distribute transparently.
-        
-          However, if you have Setuptools installed in your system-wide Python,
-          and if the virtualenv you are in was generated without the `--no-site-packages`
-          option, the Distribute installation will stop.
-        
-          In this case you need to build a virtualenv with the `--no-site-packages`
-          option or to install `Distribute` globally.
-        
-        - **How does Distribute interact with zc.buildout?**
-        
-          You can use Distribute in your zc.buildout, with the --distribute option,
-          starting at zc.buildout 1.4.2::
-        
-              $ python bootstrap.py --distribute
-        
-          For previous zc.buildout versions, *the only thing* you need to do
-          is use the bootstrap at `http://python-distribute.org/bootstrap.py`.  Run
-          that bootstrap and ``bin/buildout`` (and all other buildout-generated
-          scripts) will transparently use distribute instead of setuptools.  You do
-          not need a specific buildout release.
-        
-          A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
-          left in place unmodified.  So other buildouts that do not yet use the new
-          bootstrap continue to work just fine.  And there is no need to list
-          ``distribute`` somewhere in your eggs: using the bootstrap is enough.
-        
-          The source code for the bootstrap script is located at
-          `http://bitbucket.org/tarek/buildout-distribute`.
-        
-        
-        
-        -----------------------------
-        Feedback and getting involved
-        -----------------------------
-        
-        - Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
-        - Issue tracker: http://bitbucket.org/tarek/distribute/issues/
-        - Code Repository: http://bitbucket.org/tarek/distribute
-        
-        =======
-        CHANGES
-        =======
-        
-        ----------
-        Unreleased
-        ----------
-        
-        + `Issue #341`_: 0.6.33 fails to build under python 2.4
-        
-        ------
-        0.6.33
-        ------
-        
-        * Fix 2 errors with Jython 2.5.
-        * Fix 1 failure with Jython 2.5 and 2.7.
-        * Disable workaround for Jython scripts on Linux systems.
-        * `Issue #336`_: `setup.py` no longer masks failure exit code when tests fail.
-        * Fix issue in pkg_resources where try/except around a platform-dependent
-          import would trigger hook load failures on Mercurial. See pull request 32
-          for details.
-        * `Issue #341`_: Fix a ResourceWarning.
-        
-        ------
-        0.6.32
-        ------
-        
-        * Fix test suite with Python 2.6.
-        * Fix some DeprecationWarnings and ResourceWarnings.
-        * `Issue #335`_: Backed out `setup_requires` superseding installed requirements
-          until the regression can be addressed.
-        
-        ------
-        0.6.31
-        ------
-        
-        * `Issue #303`_: Make sure the manifest only ever contains UTF-8 in Python 3.
-        * `Issue #329`_: Properly close files created by tests for compatibility with
-          Jython.
-        * Work around Jython bugs `#1980 <http://bugs.jython.org/issue1980>`_ and
-          `#1981 <http://bugs.jython.org/issue1981>`_.
-        * `Issue #334`_: Provide workaround for packages that reference `sys.__stdout__`
-          such as numpy does. This change should address
-          `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
-          as the system encoding is UTF-8 or the IO encoding is specified in the
-          environment, i.e.::
-        
-             PYTHONIOENCODING=utf8 pip install numpy
-        
-        * Fix for encoding issue when installing from Windows executable on Python 3.
-        * `Issue #323`_: Allow `setup_requires` requirements to supersede installed
-          requirements. Added some new keyword arguments to existing pkg_resources
-          methods. Also had to update how __path__ is handled for namespace packages
-          to ensure that when a new egg distribution containing a namespace package is
-          placed on sys.path, the entries in __path__ are found in the same order they
-          would have been in had that egg been on the path when pkg_resources was
-          first imported.
-        
-        ------
-        0.6.30
-        ------
-        
-        * `Issue #328`_: Clean up temporary directories in distribute_setup.py.
-        * Fix fatal bug in distribute_setup.py.
-        
-        ------
-        0.6.29
-        ------
-        
-        * Pull Request #14: Honor file permissions in zip files.
-        * `Issue #327`_: Merged pull request #24 to fix a dependency problem with pip.
-        * Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
-        * If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
-          to produce uploadable documentation.
-        * `Issue #326`_: `upload_docs` provided mangled auth credentials under Python 3.
-        * `Issue #320`_: Fix check for "createable" in distribute_setup.py.
-        * `Issue #305`_: Remove a warning that was triggered during normal operations.
-        * `Issue #311`_: Print metadata in UTF-8 independent of platform.
-        * `Issue #303`_: Read manifest file with UTF-8 encoding under Python 3.
-        * `Issue #301`_: Allow running tests of namespace packages when using 2to3.
-        * `Issue #304`_: Prevent import loop in site.py under Python 3.3.
-        * `Issue #283`_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
-        * `Issue #299`_: The develop command didn't work on Python 3, when using 2to3,
-          as the egg link would go to the Python 2 source. Linking to the 2to3'd code
-          in build/lib makes it work, although you will have to rebuild the module
-          before testing it.
-        * `Issue #306`_: Even if 2to3 is used, we build in-place under Python 2.
-        * `Issue #307`_: Prints the full path when .svn/entries is broken.
-        * `Issue #313`_: Support for sdist subcommands (Python 2.7)
-        * `Issue #314`_: test_local_index() would fail on OS X.
-        * `Issue #310`_: Non-ASCII characters in a namespace __init__.py cause errors.
-        * `Issue #218`_: Improved documentation on behavior of `package_data` and
-          `include_package_data`. Files indicated by `package_data` are now included
-          in the manifest.
-        * `distribute_setup.py` now allows a `--download-base` argument for retrieving
-          distribute from a specified location.
-        
-        ------
-        0.6.28
-        ------
-        
-        * `Issue #294`_: setup.py can now be invoked from any directory.
-        * Scripts are now installed honoring the umask.
-        * Added support for .dist-info directories.
-        * `Issue #283`_: Fix and disable scanning of `*.pyc` / `*.pyo` files on
-          Python 3.3.
-        
-        ------
-        0.6.27
-        ------
-        
-        * Support current snapshots of CPython 3.3.
-        * Distribute now recognizes README.rst as a standard, default readme file.
-        * Exclude 'encodings' modules when removing modules from sys.modules.
-          Workaround for #285.
-        * `Issue #231`_: Don't fiddle with system python when used with buildout
-          (bootstrap.py)
-        
-        ------
-        0.6.26
-        ------
-        
-        * `Issue #183`_: Symlinked files are now extracted from source distributions.
-        * `Issue #227`_: Easy_install fetch parameters are now passed during the
-          installation of a source distribution; now fulfillment of setup_requires
-          dependencies will honor the parameters passed to easy_install.
-        
-        ------
-        0.6.25
-        ------
-        
-        * `Issue #258`_: Workaround a cache issue
-        * `Issue #260`_: distribute_setup.py now accepts the --user parameter for
-          Python 2.6 and later.
-        * `Issue #262`_: package_index.open_with_auth no longer throws LookupError
-          on Python 3.
-        * `Issue #269`_: AttributeError when an exception occurs reading Manifest.in
-          on late releases of Python.
-        * `Issue #272`_: Prevent TypeError when namespace package names are unicode
-          and single-install-externally-managed is used. Also fixes PIP `issue
-          449`_.
-        * `Issue #273`_: Legacy script launchers now install with Python2/3 support.
-        
-        ------
-        0.6.24
-        ------
-        
-        * `Issue #249`_: Added options to exclude 2to3 fixers
-        
-        ------
-        0.6.23
-        ------
-        
-        * `Issue #244`_: Fixed a test
-        * `Issue #243`_: Fixed a test
-        * `Issue #239`_: Fixed a test
-        * `Issue #240`_: Fixed a test
-        * `Issue #241`_: Fixed a test
-        * `Issue #237`_: Fixed a test
-        * `Issue #238`_: easy_install now uses 64bit executable wrappers on 64bit Python
-        * `Issue #208`_: Fixed parsed_versions, it now honors post-releases as noted in the documentation
-        * `Issue #207`_: Windows cli and gui wrappers pass CTRL-C to child python process
-        * `Issue #227`_: easy_install now passes its arguments to setup.py bdist_egg
-        * `Issue #225`_: Fixed a NameError on Python 2.5, 2.4
-        
-        ------
-        0.6.21
-        ------
-        
-        * `Issue #225`_: Fixed a regression on py2.4
-        
-        ------
-        0.6.20
-        ------
-        
-        * `Issue #135`_: Include url in warning when processing URLs in package_index.
-        * `Issue #212`_: Fix issue where easy_install fails on Python 3 with the Windows installer.
-        * `Issue #213`_: Fix typo in documentation.
-        
-        ------
-        0.6.19
-        ------
-        
-        * `Issue 206`_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
-        
-        ------
-        0.6.18
-        ------
-        
-        * `Issue 210`_: Fixed a regression introduced by `Issue 204`_ fix.
-        
-        ------
-        0.6.17
-        ------
-        
-        * Support the 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
-          variable to allow disabling installation of the easy_install-${version} script.
-        * Support Python >=3.1.4 and >=3.2.1.
-        * `Issue 204`_: Don't try to import the parent of a namespace package in
-          declare_namespace
-        * `Issue 196`_: Tolerate responses with multiple Content-Length headers
-        * `Issue 205`_: Sandboxing doesn't preserve working_set. Leads to setup_requires
-          problems.
-        
-        ------
-        0.6.16
-        ------
-        
-        * Builds sdist gztar even on Windows (avoiding `Issue 193`_).
-        * `Issue 192`_: Fixed metadata omitted on Windows when package_dir
-          specified with forward-slash.
-        * `Issue 195`_: Cython build support.
-        * `Issue 200`_: Issues with recognizing 64-bit packages on Windows.
-        
-        ------
-        0.6.15
-        ------
-        
-        * Fixed typo in bdist_egg
-        * Several issues under Python 3 have been solved.
-        * `Issue 146`_: Fixed missing DLL files after easy_install of windows exe package.
-        
-        ------
-        0.6.14
-        ------
-        
-        * `Issue 170`_: Fixed unittest failure. Thanks to Toshio.
-        * `Issue 171`_: Fixed race condition in unittests that caused deadlocks in the test suite.
-        * `Issue 143`_: Fixed a lookup issue with easy_install.
-          Thanks to David and Zooko.
-        * `Issue 174`_: Fixed the edit mode when it is used with setuptools itself
-        
-        ------
-        0.6.13
-        ------
-        
-        * `Issue 160`_: 2.7 gives ValueError("Invalid IPv6 URL")
-        * `Issue 150`_: Fixed using ~/.local even in a --no-site-packages virtualenv
-        * `Issue 163`_: scan index links before external links, and don't use the md5 when
-          comparing two distributions
-        
-        ------
-        0.6.12
-        ------
-        
-        * `Issue 149`_: Fixed various failures on 2.3/2.4
-        
-        ------
-        0.6.11
-        ------
-        
-        * Found another case of SandboxViolation - fixed
-        * `Issue 15`_ and 48: Introduced a socket timeout of 15 seconds on url openings
-        * Added indexsidebar.html into MANIFEST.in
-        * `Issue 108`_: Fixed TypeError with Python3.1
-        * `Issue 121`_: Fixed --help install command trying to actually install.
-        * `Issue 112`_: Added an os.makedirs so that Tarek's solution will work.
-        * `Issue 133`_: Added --no-find-links to easy_install
-        * Added easy_install --user
-        * `Issue 100`_: Fixed develop --user not taking '.' in PYTHONPATH into account
-        * `Issue 134`_: removed spurious UserWarnings. Patch by VanLindberg
-        * `Issue 138`_: cant_write_to_target error when setup_requires is used.
-        * `Issue 147`_: respect the sys.dont_write_bytecode flag
-        
-        ------
-        0.6.10
-        ------
-        
-        * Reverted change made for the DistributionNotFound exception because
-          zc.buildout uses the exception message to get the name of the
-          distribution.
-        
-        -----
-        0.6.9
-        -----
-        
-        * `Issue 90`_: unknown setuptools version can be added in the working set
-        * `Issue 87`_: setup.py doesn't try to convert distribute_setup.py anymore.
-          Initial patch by arfrever.
-        * `Issue 89`_: added a side bar with a download link to the doc.
-        * `Issue 86`_: fixed missing sentence in pkg_resources doc.
-        * Added a nicer error message when a DistributionNotFound is raised.
-        * `Issue 80`_: test_develop now works with Python 3.1
-        * `Issue 93`_: upload_docs now works if there is an empty sub-directory.
-        * `Issue 70`_: exec bit on non-exec files
-        * `Issue 99`_: now the standalone easy_install command doesn't use a
-          "setup.cfg" if any exists in the working directory. It will use it
-          only if triggered by ``install_requires`` from a setup.py call
-          (install, develop, etc).
-        * `Issue 101`_: Allowing ``os.devnull`` in Sandbox
-        * `Issue 92`_: Fixed the "no eggs" found error with MacPort
-          (platform.mac_ver() fails)
-        * `Issue 103`_: test_get_script_header_jython_workaround not run
-          anymore under py3 with C or POSIX locale. Contributed by Arfrever.
-        * `Issue 104`_: removed the assertion when the installation fails,
-          with a nicer message for the end user.
-        * `Issue 100`_: making sure there's no SandboxViolation when
-          the setup script patches setuptools.
-        
-        -----
-        0.6.8
-        -----
-        
-        * Added "check_packages" in dist. (added in Setuptools 0.6c11)
-        * Fixed the DONT_PATCH_SETUPTOOLS state.
-        
-        -----
-        0.6.7
-        -----
-        
-        * `Issue 58`_: Added --user support to the develop command
-        * `Issue 11`_: Generated scripts now wrap their call to the script entry point
-          in the standard "if name == 'main'"
-        * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
-          can drive an installation that doesn't patch a global setuptools.
-        * Reviewed unladen-swallow specific change from
-          http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
-          and determined that it no longer applies. Distribute should work fine with
-          Unladen Swallow 2009Q3.
-        * `Issue 21`_: Allow PackageIndex.open_url to gracefully handle all cases of a
-          httplib.HTTPException instead of just InvalidURL and BadStatusLine.
-        * Removed virtual-python.py from this distribution and updated documentation
-          to point to the actively maintained virtualenv instead.
-        * `Issue 64`_: use_setuptools no longer rebuilds the distribute egg every
-          time it is run
-        * use_setuptools now properly respects the requested version
-        * use_setuptools will no longer try to import a distribute egg for the
-          wrong Python version
-        * `Issue 74`_: no_fake should be True by default.
-        * `Issue 72`_: avoid a bootstrapping issue with easy_install -U
-        
-        -----
-        0.6.6
-        -----
-        
-        * Unified the bootstrap file so it works on both py2.x and py3k without 2to3
-          (patch by Holger Krekel)
-        
-        -----
-        0.6.5
-        -----
-        
-        * `Issue 65`_: cli.exe and gui.exe are now generated at build time,
-          depending on the platform in use.
-        
-        * `Issue 67`_: Fixed doc typo (PEP 381/382)
-        
-        * Distribute no longer shadows setuptools if we require a 0.7-series
-          setuptools.  And an error is raised when installing a 0.7 setuptools with
-          distribute.
-        
-        * When run from within buildout, no attempt is made to modify an existing
-          setuptools egg, whether in a shared egg directory or a system setuptools.
-        
-        * Fixed a hole in sandboxing allowing builtin file to write outside of
-          the sandbox.
-        
-        -----
-        0.6.4
-        -----
-        
-        * Added the generation of `distribute_setup_3k.py` during the release.
-          This closes `issue #52`_.
-        
-        * Added an upload_docs command to easily upload project documentation to
-          PyPI's http://packages.python.org. This closes `issue #56`_.
-        
-        * Fixed a bootstrap bug on the use_setuptools() API.
-        
-        -----
-        0.6.3
-        -----
-        
-        setuptools
-        ==========
-        
-        * Fixed a bunch of calls to file() that caused crashes on Python 3.
-        
-        bootstrapping
-        =============
-        
-        * Fixed a bug in sorting that caused bootstrap to fail on Python 3.
-        
-        -----
-        0.6.2
-        -----
-        
-        setuptools
-        ==========
-        
-        * Added Python 3 support; see docs/python3.txt.
-          This closes http://bugs.python.org/setuptools/issue39.
-        
-        * Added option to run 2to3 automatically when installing on Python 3.
-          This closes `issue #31`_.
-        
-        * Fixed invalid usage of requirement.parse, that broke develop -d.
-          This closes http://bugs.python.org/setuptools/issue44.
-        
-        * Fixed script launcher for 64-bit Windows.
-          This closes http://bugs.python.org/setuptools/issue2.
-        
-        * KeyError when compiling extensions.
-          This closes http://bugs.python.org/setuptools/issue41.
-        
-        bootstrapping
-        =============
-        
-        * Fixed bootstrap not working on Windows. This closes `issue #49`_.
-        
-        * Fixed 2.6 dependencies. This closes `issue #50`_.
-        
-        * Make sure setuptools is patched when running through easy_install
-          This closes http://bugs.python.org/setuptools/issue40.
-        
-        -----
-        0.6.1
-        -----
-        
-        setuptools
-        ==========
-        
-        * package_index.urlopen now catches BadStatusLine and malformed url errors.
-          This closes `issue #16`_ and `issue #18`_.
-        
-        * zip_ok is now False by default. This closes
-          http://bugs.python.org/setuptools/issue33.
-        
-        * Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
-        
-        * Fixed invalid bootstrapping with easy_install installation (`issue #40`_).
-          Thanks to Florian Schulze for the help.
-        
-        * Removed buildout/bootstrap.py. A new repository will create a specific
-          bootstrap.py script.
-        
-        
-        bootstrapping
-        =============
-        
-        * The bootstrap process leaves setuptools alone if it is detected in the system
-          and --root or --prefix is provided, but is not in the same location.
-          This closes `issue #10`_.
-        
-        ---
-        0.6
-        ---
-        
-        setuptools
-        ==========
-        
-        * Packages required at build time were not fully present at install time.
-          This closes `issue #12`_.
-        
-        * Protected against failures in tarfile extraction. This closes `issue #10`_.
-        
-        * Made Jython api_tests.txt doctest compatible. This closes `issue #7`_.
-        
-        * sandbox.py replaced builtin type file with builtin function open. This
-          closes `issue #6`_.
-        
-        * Immediately close all file handles. This closes `issue #3`_.
-        
-        * Added compatibility with Subversion 1.6. This references `issue #1`_.
-        
-        pkg_resources
-        =============
-        
-        * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
-          instead. Based on a patch from ronaldoussoren. This closes `issue #5`_.
-        
-        * Fixed a SandboxViolation for mkdir that could occur in certain cases.
-          This closes `issue #13`_.
-        
-        * Allow find_on_path on systems with tight permissions to fail gracefully.
-          This closes `issue #9`_.
-        
-        * Corrected inconsistency between documentation and code of add_entry.
-          This closes `issue #8`_.
-        
-        * Immediately close all file handles. This closes `issue #3`_.
-        
-        easy_install
-        ============
-        
-        * Immediately close all file handles. This closes `issue #3`_.
-        
-        
-        .. _`Issue #135`: http://bitbucket.org/tarek/distribute/issue/135
-        .. _`Issue #183`: http://bitbucket.org/tarek/distribute/issue/183
-        .. _`Issue #207`: http://bitbucket.org/tarek/distribute/issue/207
-        .. _`Issue #208`: http://bitbucket.org/tarek/distribute/issue/208
-        .. _`Issue #212`: http://bitbucket.org/tarek/distribute/issue/212
-        .. _`Issue #213`: http://bitbucket.org/tarek/distribute/issue/213
-        .. _`Issue #218`: http://bitbucket.org/tarek/distribute/issue/218
-        .. _`Issue #225`: http://bitbucket.org/tarek/distribute/issue/225
-        .. _`Issue #227`: http://bitbucket.org/tarek/distribute/issue/227
-        .. _`Issue #231`: http://bitbucket.org/tarek/distribute/issue/231
-        .. _`Issue #237`: http://bitbucket.org/tarek/distribute/issue/237
-        .. _`Issue #238`: http://bitbucket.org/tarek/distribute/issue/238
-        .. _`Issue #239`: http://bitbucket.org/tarek/distribute/issue/239
-        .. _`Issue #240`: http://bitbucket.org/tarek/distribute/issue/240
-        .. _`Issue #241`: http://bitbucket.org/tarek/distribute/issue/241
-        .. _`Issue #243`: http://bitbucket.org/tarek/distribute/issue/243
-        .. _`Issue #244`: http://bitbucket.org/tarek/distribute/issue/244
-        .. _`Issue #249`: http://bitbucket.org/tarek/distribute/issue/249
-        .. _`Issue #258`: http://bitbucket.org/tarek/distribute/issue/258
-        .. _`Issue #260`: http://bitbucket.org/tarek/distribute/issue/260
-        .. _`Issue #262`: http://bitbucket.org/tarek/distribute/issue/262
-        .. _`Issue #269`: http://bitbucket.org/tarek/distribute/issue/269
-        .. _`Issue #272`: http://bitbucket.org/tarek/distribute/issue/272
-        .. _`Issue #273`: http://bitbucket.org/tarek/distribute/issue/273
-        .. _`Issue #283`: http://bitbucket.org/tarek/distribute/issue/283
-        .. _`Issue #294`: http://bitbucket.org/tarek/distribute/issue/294
-        .. _`Issue #299`: http://bitbucket.org/tarek/distribute/issue/299
-        .. _`Issue #301`: http://bitbucket.org/tarek/distribute/issue/301
-        .. _`Issue #303`: http://bitbucket.org/tarek/distribute/issue/303
-        .. _`Issue #304`: http://bitbucket.org/tarek/distribute/issue/304
-        .. _`Issue #305`: http://bitbucket.org/tarek/distribute/issue/305
-        .. _`Issue #306`: http://bitbucket.org/tarek/distribute/issue/306
-        .. _`Issue #307`: http://bitbucket.org/tarek/distribute/issue/307
-        .. _`Issue #310`: http://bitbucket.org/tarek/distribute/issue/310
-        .. _`Issue #311`: http://bitbucket.org/tarek/distribute/issue/311
-        .. _`Issue #313`: http://bitbucket.org/tarek/distribute/issue/313
-        .. _`Issue #314`: http://bitbucket.org/tarek/distribute/issue/314
-        .. _`Issue #320`: http://bitbucket.org/tarek/distribute/issue/320
-        .. _`Issue #323`: http://bitbucket.org/tarek/distribute/issue/323
-        .. _`Issue #326`: http://bitbucket.org/tarek/distribute/issue/326
-        .. _`Issue #327`: http://bitbucket.org/tarek/distribute/issue/327
-        .. _`Issue #328`: http://bitbucket.org/tarek/distribute/issue/328
-        .. _`Issue #329`: http://bitbucket.org/tarek/distribute/issue/329
-        .. _`Issue #334`: http://bitbucket.org/tarek/distribute/issue/334
-        .. _`Issue #335`: http://bitbucket.org/tarek/distribute/issue/335
-        .. _`Issue #336`: http://bitbucket.org/tarek/distribute/issue/336
-        .. _`Issue #341`: http://bitbucket.org/tarek/distribute/issue/341
-        .. _`Issue 100`: http://bitbucket.org/tarek/distribute/issue/100
-        .. _`Issue 101`: http://bitbucket.org/tarek/distribute/issue/101
-        .. _`Issue 103`: http://bitbucket.org/tarek/distribute/issue/103
-        .. _`Issue 104`: http://bitbucket.org/tarek/distribute/issue/104
-        .. _`Issue 108`: http://bitbucket.org/tarek/distribute/issue/108
-        .. _`Issue 11`: http://bitbucket.org/tarek/distribute/issue/11
-        .. _`Issue 112`: http://bitbucket.org/tarek/distribute/issue/112
-        .. _`Issue 121`: http://bitbucket.org/tarek/distribute/issue/121
-        .. _`Issue 133`: http://bitbucket.org/tarek/distribute/issue/133
-        .. _`Issue 134`: http://bitbucket.org/tarek/distribute/issue/134
-        .. _`Issue 138`: http://bitbucket.org/tarek/distribute/issue/138
-        .. _`Issue 143`: http://bitbucket.org/tarek/distribute/issue/143
-        .. _`Issue 146`: http://bitbucket.org/tarek/distribute/issue/146
-        .. _`Issue 147`: http://bitbucket.org/tarek/distribute/issue/147
-        .. _`Issue 149`: http://bitbucket.org/tarek/distribute/issue/149
-        .. _`Issue 15`: http://bitbucket.org/tarek/distribute/issue/15
-        .. _`Issue 150`: http://bitbucket.org/tarek/distribute/issue/150
-        .. _`Issue 160`: http://bitbucket.org/tarek/distribute/issue/160
-        .. _`Issue 163`: http://bitbucket.org/tarek/distribute/issue/163
-        .. _`Issue 170`: http://bitbucket.org/tarek/distribute/issue/170
-        .. _`Issue 171`: http://bitbucket.org/tarek/distribute/issue/171
-        .. _`Issue 174`: http://bitbucket.org/tarek/distribute/issue/174
-        .. _`Issue 192`: http://bitbucket.org/tarek/distribute/issue/192
-        .. _`Issue 193`: http://bitbucket.org/tarek/distribute/issue/193
-        .. _`Issue 195`: http://bitbucket.org/tarek/distribute/issue/195
-        .. _`Issue 196`: http://bitbucket.org/tarek/distribute/issue/196
-        .. _`Issue 200`: http://bitbucket.org/tarek/distribute/issue/200
-        .. _`Issue 204`: http://bitbucket.org/tarek/distribute/issue/204
-        .. _`Issue 205`: http://bitbucket.org/tarek/distribute/issue/205
-        .. _`Issue 206`: http://bitbucket.org/tarek/distribute/issue/206
-        .. _`Issue 21`: http://bitbucket.org/tarek/distribute/issue/21
-        .. _`Issue 210`: http://bitbucket.org/tarek/distribute/issue/210
-        .. _`Issue 58`: http://bitbucket.org/tarek/distribute/issue/58
-        .. _`Issue 64`: http://bitbucket.org/tarek/distribute/issue/64
-        .. _`Issue 65`: http://bitbucket.org/tarek/distribute/issue/65
-        .. _`Issue 67`: http://bitbucket.org/tarek/distribute/issue/67
-        .. _`Issue 70`: http://bitbucket.org/tarek/distribute/issue/70
-        .. _`Issue 72`: http://bitbucket.org/tarek/distribute/issue/72
-        .. _`Issue 74`: http://bitbucket.org/tarek/distribute/issue/74
-        .. _`Issue 80`: http://bitbucket.org/tarek/distribute/issue/80
-        .. _`Issue 86`: http://bitbucket.org/tarek/distribute/issue/86
-        .. _`Issue 87`: http://bitbucket.org/tarek/distribute/issue/87
-        .. _`Issue 89`: http://bitbucket.org/tarek/distribute/issue/89
-        .. _`Issue 90`: http://bitbucket.org/tarek/distribute/issue/90
-        .. _`Issue 92`: http://bitbucket.org/tarek/distribute/issue/92
-        .. _`Issue 93`: http://bitbucket.org/tarek/distribute/issue/93
-        .. _`Issue 99`: http://bitbucket.org/tarek/distribute/issue/99
-        .. _`issue
-          449`: http://bitbucket.org/tarek/distribute/issue/449
-        .. _`issue #1`: http://bitbucket.org/tarek/distribute/issue/1
-        .. _`issue #10`: http://bitbucket.org/tarek/distribute/issue/10
-        .. _`issue #12`: http://bitbucket.org/tarek/distribute/issue/12
-        .. _`issue #13`: http://bitbucket.org/tarek/distribute/issue/13
-        .. _`issue #16`: http://bitbucket.org/tarek/distribute/issue/16
-        .. _`issue #18`: http://bitbucket.org/tarek/distribute/issue/18
-        .. _`issue #3`: http://bitbucket.org/tarek/distribute/issue/3
-        .. _`issue #31`: http://bitbucket.org/tarek/distribute/issue/31
-        .. _`issue #40`: http://bitbucket.org/tarek/distribute/issue/40
-        .. _`issue #49`: http://bitbucket.org/tarek/distribute/issue/49
-        .. _`issue #5`: http://bitbucket.org/tarek/distribute/issue/5
-        .. _`issue #50`: http://bitbucket.org/tarek/distribute/issue/50
-        .. _`issue #52`: http://bitbucket.org/tarek/distribute/issue/52
-        .. _`issue #56`: http://bitbucket.org/tarek/distribute/issue/56
-        .. _`issue #6`: http://bitbucket.org/tarek/distribute/issue/6
-        .. _`issue #7`: http://bitbucket.org/tarek/distribute/issue/7
-        .. _`issue #8`: http://bitbucket.org/tarek/distribute/issue/8
-        .. _`issue #9`: http://bitbucket.org/tarek/distribute/issue/9
-        
-        
-Keywords: CPAN PyPI distutils eggs package management
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: License :: OSI Approved :: Zope Public License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2.4
-Classifier: Programming Language :: Python :: 2.5
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.1
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: System :: Archiving :: Packaging
-Classifier: Topic :: System :: Systems Administration
-Classifier: Topic :: Utilities
diff --git a/vendor/distribute-0.6.34/distribute.egg-info/SOURCES.txt b/vendor/distribute-0.6.34/distribute.egg-info/SOURCES.txt
deleted file mode 100644
index 186220d23c35f627ed0da69b312d72c0080acc75..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,109 +0,0 @@
-CHANGES.txt
-CONTRIBUTORS.txt
-DEVGUIDE.txt
-MANIFEST.in
-README.txt
-distribute_setup.py
-easy_install.py
-launcher.c
-pkg_resources.py
-release.py
-setup.cfg
-setup.py
-site.py
-_markerlib/__init__.py
-_markerlib/markers.py
-distribute.egg-info/PKG-INFO
-distribute.egg-info/SOURCES.txt
-distribute.egg-info/dependency_links.txt
-distribute.egg-info/entry_points.txt
-distribute.egg-info/top_level.txt
-distribute.egg-info/zip-safe
-docs/Makefile
-docs/conf.py
-docs/easy_install.txt
-docs/index.txt
-docs/pkg_resources.txt
-docs/python3.txt
-docs/roadmap.txt
-docs/setuptools.txt
-docs/using.txt
-docs/_templates/indexsidebar.html
-docs/_theme/nature/theme.conf
-docs/_theme/nature/static/nature.css_t
-docs/_theme/nature/static/pygments.css
-docs/build/html/_sources/easy_install.txt
-docs/build/html/_sources/index.txt
-docs/build/html/_sources/pkg_resources.txt
-docs/build/html/_sources/python3.txt
-docs/build/html/_sources/roadmap.txt
-docs/build/html/_sources/setuptools.txt
-docs/build/html/_sources/using.txt
-docs/build/html/_static/basic.css
-docs/build/html/_static/nature.css
-docs/build/html/_static/pygments.css
-setuptools/__init__.py
-setuptools/archive_util.py
-setuptools/cli-32.exe
-setuptools/cli-64.exe
-setuptools/cli.exe
-setuptools/depends.py
-setuptools/dist.py
-setuptools/extension.py
-setuptools/gui-32.exe
-setuptools/gui-64.exe
-setuptools/gui.exe
-setuptools/package_index.py
-setuptools/sandbox.py
-setuptools/script template (dev).py
-setuptools/script template.py
-setuptools/command/__init__.py
-setuptools/command/alias.py
-setuptools/command/bdist_egg.py
-setuptools/command/bdist_rpm.py
-setuptools/command/bdist_wininst.py
-setuptools/command/build_ext.py
-setuptools/command/build_py.py
-setuptools/command/develop.py
-setuptools/command/easy_install.py
-setuptools/command/egg_info.py
-setuptools/command/install.py
-setuptools/command/install_egg_info.py
-setuptools/command/install_lib.py
-setuptools/command/install_scripts.py
-setuptools/command/register.py
-setuptools/command/rotate.py
-setuptools/command/saveopts.py
-setuptools/command/sdist.py
-setuptools/command/setopt.py
-setuptools/command/test.py
-setuptools/command/upload.py
-setuptools/command/upload_docs.py
-setuptools/tests/__init__.py
-setuptools/tests/doctest.py
-setuptools/tests/py26compat.py
-setuptools/tests/server.py
-setuptools/tests/test_bdist_egg.py
-setuptools/tests/test_build_ext.py
-setuptools/tests/test_develop.py
-setuptools/tests/test_dist_info.py
-setuptools/tests/test_easy_install.py
-setuptools/tests/test_markerlib.py
-setuptools/tests/test_packageindex.py
-setuptools/tests/test_resources.py
-setuptools/tests/test_sandbox.py
-setuptools/tests/test_sdist.py
-setuptools/tests/test_test.py
-setuptools/tests/test_upload_docs.py
-setuptools/tests/win_script_wrapper.txt
-setuptools/tests/indexes/test_links_priority/external.html
-setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
-tests/api_tests.txt
-tests/install_test.py
-tests/manual_test.py
-tests/test_distribute_setup.py
-tests/shlib_test/hello.c
-tests/shlib_test/hello.pyx
-tests/shlib_test/hellolib.c
-tests/shlib_test/setup.py
-tests/shlib_test/test_hello.py
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/distribute.egg-info/dependency_links.txt b/vendor/distribute-0.6.34/distribute.egg-info/dependency_links.txt
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/vendor/distribute-0.6.34/distribute.egg-info/entry_points.txt b/vendor/distribute-0.6.34/distribute.egg-info/entry_points.txt
deleted file mode 100644
index 663882d630edfb4b4e59b2705c8345d70b75f8f3..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute.egg-info/entry_points.txt
+++ /dev/null
@@ -1,62 +0,0 @@
-[distutils.commands]
-bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
-rotate = setuptools.command.rotate:rotate
-develop = setuptools.command.develop:develop
-setopt = setuptools.command.setopt:setopt
-build_py = setuptools.command.build_py:build_py
-saveopts = setuptools.command.saveopts:saveopts
-egg_info = setuptools.command.egg_info:egg_info
-register = setuptools.command.register:register
-upload_docs = setuptools.command.upload_docs:upload_docs
-install_egg_info = setuptools.command.install_egg_info:install_egg_info
-alias = setuptools.command.alias:alias
-easy_install = setuptools.command.easy_install:easy_install
-install_scripts = setuptools.command.install_scripts:install_scripts
-bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
-bdist_egg = setuptools.command.bdist_egg:bdist_egg
-install = setuptools.command.install:install
-test = setuptools.command.test:test
-install_lib = setuptools.command.install_lib:install_lib
-build_ext = setuptools.command.build_ext:build_ext
-sdist = setuptools.command.sdist:sdist
-
-[egg_info.writers]
-dependency_links.txt = setuptools.command.egg_info:overwrite_arg
-requires.txt = setuptools.command.egg_info:write_requirements
-PKG-INFO = setuptools.command.egg_info:write_pkg_info
-eager_resources.txt = setuptools.command.egg_info:overwrite_arg
-top_level.txt = setuptools.command.egg_info:write_toplevel_names
-namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
-entry_points.txt = setuptools.command.egg_info:write_entries
-depends.txt = setuptools.command.egg_info:warn_depends_obsolete
-
-[console_scripts]
-easy_install = setuptools.command.easy_install:main
-easy_install-2.7 = setuptools.command.easy_install:main
-
-[setuptools.file_finders]
-svn_cvs = setuptools.command.sdist:_default_revctrl
-
-[distutils.setup_keywords]
-dependency_links = setuptools.dist:assert_string_list
-entry_points = setuptools.dist:check_entry_points
-extras_require = setuptools.dist:check_extras
-use_2to3_exclude_fixers = setuptools.dist:assert_string_list
-package_data = setuptools.dist:check_package_data
-install_requires = setuptools.dist:check_requirements
-use_2to3 = setuptools.dist:assert_bool
-use_2to3_fixers = setuptools.dist:assert_string_list
-include_package_data = setuptools.dist:assert_bool
-exclude_package_data = setuptools.dist:check_package_data
-namespace_packages = setuptools.dist:check_nsp
-test_suite = setuptools.dist:check_test_suite
-eager_resources = setuptools.dist:assert_string_list
-zip_safe = setuptools.dist:assert_bool
-test_loader = setuptools.dist:check_importable
-packages = setuptools.dist:check_packages
-convert_2to3_doctests = setuptools.dist:assert_string_list
-tests_require = setuptools.dist:check_requirements
-
-[setuptools.installation]
-eggsecutable = setuptools.command.easy_install:bootstrap
-
diff --git a/vendor/distribute-0.6.34/distribute.egg-info/top_level.txt b/vendor/distribute-0.6.34/distribute.egg-info/top_level.txt
deleted file mode 100644
index 23374f9ccfaac50c7ffdbe61049c5b3d7db9085d..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute.egg-info/top_level.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-_markerlib
-easy_install
-pkg_resources
-setuptools
-site
diff --git a/vendor/distribute-0.6.34/distribute.egg-info/zip-safe b/vendor/distribute-0.6.34/distribute.egg-info/zip-safe
deleted file mode 100644
index d3f5a12faa99758192ecc4ed3fc22c9249232e86..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute.egg-info/zip-safe
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/vendor/distribute-0.6.34/distribute_setup.py b/vendor/distribute-0.6.34/distribute_setup.py
deleted file mode 100644
index a1cc2a1a9f6d04ef01f8e9f62e03c37886be42b8..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/distribute_setup.py
+++ /dev/null
@@ -1,546 +0,0 @@
-#!python
-"""Bootstrap distribute installation
-
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this to the top of your setup.py::
-
-    from distribute_setup import use_setuptools
-    use_setuptools()
-
-If you want to require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, you can do so by supplying
-the appropriate options to ``use_setuptools()``.
-
-This file can also be run as a script to install or upgrade setuptools.
-"""
-import os
-import shutil
-import sys
-import time
-import fnmatch
-import tempfile
-import tarfile
-import optparse
-
-from distutils import log
-
-try:
-    from site import USER_SITE
-except ImportError:
-    USER_SITE = None
-
-try:
-    import subprocess
-
-    def _python_cmd(*args):
-        args = (sys.executable,) + args
-        return subprocess.call(args) == 0
-
-except ImportError:
-    # will be used for python 2.3
-    def _python_cmd(*args):
-        args = (sys.executable,) + args
-        # quoting arguments if windows
-        if sys.platform == 'win32':
-            def quote(arg):
-                if ' ' in arg:
-                    return '"%s"' % arg
-                return arg
-            args = [quote(arg) for arg in args]
-        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
-
-DEFAULT_VERSION = "0.6.34"
-DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
-SETUPTOOLS_FAKED_VERSION = "0.6c11"
-
-SETUPTOOLS_PKG_INFO = """\
-Metadata-Version: 1.0
-Name: setuptools
-Version: %s
-Summary: xxxx
-Home-page: xxx
-Author: xxx
-Author-email: xxx
-License: xxx
-Description: xxx
-""" % SETUPTOOLS_FAKED_VERSION
-
-
-def _install(tarball, install_args=()):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
-        # installing
-        log.warn('Installing Distribute')
-        if not _python_cmd('setup.py', 'install', *install_args):
-            log.warn('Something went wrong during the installation.')
-            log.warn('See the error message above.')
-            # exitcode will be 2
-            return 2
-    finally:
-        os.chdir(old_wd)
-        shutil.rmtree(tmpdir)
-
-
-def _build_egg(egg, tarball, to_dir):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
-        # building an egg
-        log.warn('Building a Distribute egg in %s', to_dir)
-        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
-
-    finally:
-        os.chdir(old_wd)
-        shutil.rmtree(tmpdir)
-    # returning the result
-    log.warn(egg)
-    if not os.path.exists(egg):
-        raise IOError('Could not build the egg.')
-
-
-def _do_download(version, download_base, to_dir, download_delay):
-    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
-                       % (version, sys.version_info[0], sys.version_info[1]))
-    if not os.path.exists(egg):
-        tarball = download_setuptools(version, download_base,
-                                      to_dir, download_delay)
-        _build_egg(egg, tarball, to_dir)
-    sys.path.insert(0, egg)
-    import setuptools
-    setuptools.bootstrap_install_from = egg
-
-
-def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-                   to_dir=os.curdir, download_delay=15, no_fake=True):
-    # making sure we use the absolute path
-    to_dir = os.path.abspath(to_dir)
-    was_imported = 'pkg_resources' in sys.modules or \
-        'setuptools' in sys.modules
-    try:
-        try:
-            import pkg_resources
-            if not hasattr(pkg_resources, '_distribute'):
-                if not no_fake:
-                    _fake_setuptools()
-                raise ImportError
-        except ImportError:
-            return _do_download(version, download_base, to_dir, download_delay)
-        try:
-            pkg_resources.require("distribute>=" + version)
-            return
-        except pkg_resources.VersionConflict:
-            e = sys.exc_info()[1]
-            if was_imported:
-                sys.stderr.write(
-                "The required version of distribute (>=%s) is not available,\n"
-                "and can't be installed while this script is running. Please\n"
-                "install a more recent version first, using\n"
-                "'easy_install -U distribute'."
-                "\n\n(Currently using %r)\n" % (version, e.args[0]))
-                sys.exit(2)
-            else:
-                del pkg_resources, sys.modules['pkg_resources']    # reload ok
-                return _do_download(version, download_base, to_dir,
-                                    download_delay)
-        except pkg_resources.DistributionNotFound:
-            return _do_download(version, download_base, to_dir,
-                                download_delay)
-    finally:
-        if not no_fake:
-            _create_fake_setuptools_pkg_info(to_dir)
-
-
-def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-                        to_dir=os.curdir, delay=15):
-    """Download distribute from a specified location and return its filename
-
-    `version` should be a valid distribute version number that is available
-    as an egg for download under the `download_base` URL (which should end
-    with a '/'). `to_dir` is the directory where the egg will be downloaded.
-    `delay` is the number of seconds to pause before an actual download
-    attempt.
-    """
-    # making sure we use the absolute path
-    to_dir = os.path.abspath(to_dir)
-    try:
-        from urllib.request import urlopen
-    except ImportError:
-        from urllib2 import urlopen
-    tgz_name = "distribute-%s.tar.gz" % version
-    url = download_base + tgz_name
-    saveto = os.path.join(to_dir, tgz_name)
-    src = dst = None
-    if not os.path.exists(saveto):  # Avoid repeated downloads
-        try:
-            log.warn("Downloading %s", url)
-            src = urlopen(url)
-            # Read/write all in one block, so we don't create a corrupt file
-            # if the download is interrupted.
-            data = src.read()
-            dst = open(saveto, "wb")
-            dst.write(data)
-        finally:
-            if src:
-                src.close()
-            if dst:
-                dst.close()
-    return os.path.realpath(saveto)
-
-
-def _no_sandbox(function):
-    def __no_sandbox(*args, **kw):
-        try:
-            from setuptools.sandbox import DirectorySandbox
-            if not hasattr(DirectorySandbox, '_old'):
-                def violation(*args):
-                    pass
-                DirectorySandbox._old = DirectorySandbox._violation
-                DirectorySandbox._violation = violation
-                patched = True
-            else:
-                patched = False
-        except ImportError:
-            patched = False
-
-        try:
-            return function(*args, **kw)
-        finally:
-            if patched:
-                DirectorySandbox._violation = DirectorySandbox._old
-                del DirectorySandbox._old
-
-    return __no_sandbox
-
-
-def _patch_file(path, content):
-    """Will backup the file then patch it"""
-    f = open(path)
-    existing_content = f.read()
-    f.close()
-    if existing_content == content:
-        # already patched
-        log.warn('Already patched.')
-        return False
-    log.warn('Patching...')
-    _rename_path(path)
-    f = open(path, 'w')
-    try:
-        f.write(content)
-    finally:
-        f.close()
-    return True
-
-_patch_file = _no_sandbox(_patch_file)
-
-
-def _same_content(path, content):
-    f = open(path)
-    existing_content = f.read()
-    f.close()
-    return existing_content == content
-
-
-def _rename_path(path):
-    new_name = path + '.OLD.%s' % time.time()
-    log.warn('Renaming %s to %s', path, new_name)
-    os.rename(path, new_name)
-    return new_name
-
-
-def _remove_flat_installation(placeholder):
-    if not os.path.isdir(placeholder):
-        log.warn('Unknown installation at %s', placeholder)
-        return False
-    found = False
-    for file in os.listdir(placeholder):
-        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
-            found = True
-            break
-    if not found:
-        log.warn('Could not locate setuptools*.egg-info')
-        return
-
-    log.warn('Moving elements out of the way...')
-    pkg_info = os.path.join(placeholder, file)
-    if os.path.isdir(pkg_info):
-        patched = _patch_egg_dir(pkg_info)
-    else:
-        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
-
-    if not patched:
-        log.warn('%s already patched.', pkg_info)
-        return False
-    # now let's move the files out of the way
-    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
-        element = os.path.join(placeholder, element)
-        if os.path.exists(element):
-            _rename_path(element)
-        else:
-            log.warn('Could not find the %s element of the '
-                     'Setuptools distribution', element)
-    return True
-
-_remove_flat_installation = _no_sandbox(_remove_flat_installation)
-
-
-def _after_install(dist):
-    log.warn('After install bootstrap.')
-    placeholder = dist.get_command_obj('install').install_purelib
-    _create_fake_setuptools_pkg_info(placeholder)
-
-
-def _create_fake_setuptools_pkg_info(placeholder):
-    if not placeholder or not os.path.exists(placeholder):
-        log.warn('Could not find the install location')
-        return
-    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
-    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
-            (SETUPTOOLS_FAKED_VERSION, pyver)
-    pkg_info = os.path.join(placeholder, setuptools_file)
-    if os.path.exists(pkg_info):
-        log.warn('%s already exists', pkg_info)
-        return
-
-    log.warn('Creating %s', pkg_info)
-    try:
-        f = open(pkg_info, 'w')
-    except EnvironmentError:
-        log.warn("Don't have permissions to write %s, skipping", pkg_info)
-        return
-    try:
-        f.write(SETUPTOOLS_PKG_INFO)
-    finally:
-        f.close()
-
-    pth_file = os.path.join(placeholder, 'setuptools.pth')
-    log.warn('Creating %s', pth_file)
-    f = open(pth_file, 'w')
-    try:
-        f.write(os.path.join(os.curdir, setuptools_file))
-    finally:
-        f.close()
-
-_create_fake_setuptools_pkg_info = _no_sandbox(
-    _create_fake_setuptools_pkg_info
-)
-
-
-def _patch_egg_dir(path):
-    # let's check if it's already patched
-    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-    if os.path.exists(pkg_info):
-        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
-            log.warn('%s already patched.', pkg_info)
-            return False
-    _rename_path(path)
-    os.mkdir(path)
-    os.mkdir(os.path.join(path, 'EGG-INFO'))
-    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-    f = open(pkg_info, 'w')
-    try:
-        f.write(SETUPTOOLS_PKG_INFO)
-    finally:
-        f.close()
-    return True
-
-_patch_egg_dir = _no_sandbox(_patch_egg_dir)
-
-
-def _before_install():
-    log.warn('Before install bootstrap.')
-    _fake_setuptools()
-
-
-def _under_prefix(location):
-    if 'install' not in sys.argv:
-        return True
-    args = sys.argv[sys.argv.index('install') + 1:]
-    for index, arg in enumerate(args):
-        for option in ('--root', '--prefix'):
-            if arg.startswith('%s=' % option):
-                top_dir = arg.split('%s=' % option)[-1]
-                return location.startswith(top_dir)
-            elif arg == option:
-                if len(args) > index + 1:
-                    top_dir = args[index + 1]
-                    return location.startswith(top_dir)
-        if arg == '--user' and USER_SITE is not None:
-            return location.startswith(USER_SITE)
-    return True
-
-
-def _fake_setuptools():
-    log.warn('Scanning installed packages')
-    try:
-        import pkg_resources
-    except ImportError:
-        # we're cool
-        log.warn('Setuptools or Distribute does not seem to be installed.')
-        return
-    ws = pkg_resources.working_set
-    try:
-        setuptools_dist = ws.find(
-            pkg_resources.Requirement.parse('setuptools', replacement=False)
-            )
-    except TypeError:
-        # old distribute API
-        setuptools_dist = ws.find(
-            pkg_resources.Requirement.parse('setuptools')
-        )
-
-    if setuptools_dist is None:
-        log.warn('No setuptools distribution found')
-        return
-    # detecting if it was already faked
-    setuptools_location = setuptools_dist.location
-    log.warn('Setuptools installation detected at %s', setuptools_location)
-
-    # if --root or --prefix was provided, and if
-    # setuptools is not located in them, we don't patch it
-    if not _under_prefix(setuptools_location):
-        log.warn('Not patching, --root or --prefix is installing Distribute'
-                 ' in another location')
-        return
-
-    # let's see if it's an egg
-    if not setuptools_location.endswith('.egg'):
-        log.warn('Non-egg installation')
-        res = _remove_flat_installation(setuptools_location)
-        if not res:
-            return
-    else:
-        log.warn('Egg installation')
-        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
-        if (os.path.exists(pkg_info) and
-            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
-            log.warn('Already patched.')
-            return
-        log.warn('Patching...')
-        # let's create a fake egg replacing the setuptools one
-        res = _patch_egg_dir(setuptools_location)
-        if not res:
-            return
-    log.warn('Patching complete.')
-    _relaunch()
-
-
-def _relaunch():
-    log.warn('Relaunching...')
-    # we have to relaunch the process
-    # pip marker to avoid a relaunch bug
-    _cmd1 = ['-c', 'install', '--single-version-externally-managed']
-    _cmd2 = ['-c', 'install', '--record']
-    if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
-        sys.argv[0] = 'setup.py'
-    args = [sys.executable] + sys.argv
-    sys.exit(subprocess.call(args))
-
-
-def _extractall(self, path=".", members=None):
-    """Extract all members from the archive to the current working
-       directory and set owner, modification time and permissions on
-       directories afterwards. `path' specifies a different directory
-       to extract to. `members' is optional and must be a subset of the
-       list returned by getmembers().
-    """
-    import copy
-    import operator
-    from tarfile import ExtractError
-    directories = []
-
-    if members is None:
-        members = self
-
-    for tarinfo in members:
-        if tarinfo.isdir():
-            # Extract directories with a safe mode.
-            directories.append(tarinfo)
-            tarinfo = copy.copy(tarinfo)
-            tarinfo.mode = 448  # decimal for oct 0700
-        self.extract(tarinfo, path)
-
-    # Reverse sort directories.
-    if sys.version_info < (2, 4):
-        def sorter(dir1, dir2):
-            return cmp(dir1.name, dir2.name)
-        directories.sort(sorter)
-        directories.reverse()
-    else:
-        directories.sort(key=operator.attrgetter('name'), reverse=True)
-
-    # Set correct owner, mtime and filemode on directories.
-    for tarinfo in directories:
-        dirpath = os.path.join(path, tarinfo.name)
-        try:
-            self.chown(tarinfo, dirpath)
-            self.utime(tarinfo, dirpath)
-            self.chmod(tarinfo, dirpath)
-        except ExtractError:
-            e = sys.exc_info()[1]
-            if self.errorlevel > 1:
-                raise
-            else:
-                self._dbg(1, "tarfile: %s" % e)
-
-
-def _build_install_args(options):
-    """
-    Build the arguments to 'python setup.py install' on the distribute package
-    """
-    install_args = []
-    if options.user_install:
-        if sys.version_info < (2, 6):
-            log.warn("--user requires Python 2.6 or later")
-            raise SystemExit(1)
-        install_args.append('--user')
-    return install_args
-
-def _parse_args():
-    """
-    Parse the command line for options
-    """
-    parser = optparse.OptionParser()
-    parser.add_option(
-        '--user', dest='user_install', action='store_true', default=False,
-        help='install in user site package (requires Python 2.6 or later)')
-    parser.add_option(
-        '--download-base', dest='download_base', metavar="URL",
-        default=DEFAULT_URL,
-        help='alternative URL from where to download the distribute package')
-    options, args = parser.parse_args()
-    # positional arguments are ignored
-    return options
-
-def main(version=DEFAULT_VERSION):
-    """Install or upgrade setuptools and EasyInstall"""
-    options = _parse_args()
-    tarball = download_setuptools(download_base=options.download_base)
-    return _install(tarball, _build_install_args(options))
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/vendor/distribute-0.6.34/docs/Makefile b/vendor/distribute-0.6.34/docs/Makefile
deleted file mode 100644
index 30bf10a930f084877b9b19d035651687e5a875de..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/Makefile
+++ /dev/null
@@ -1,75 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html web pickle htmlhelp latex changes linkcheck
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html      to make standalone HTML files"
-	@echo "  pickle    to make pickle files"
-	@echo "  json      to make JSON files"
-	@echo "  htmlhelp  to make HTML files and a HTML help project"
-	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  changes   to make an overview over all changed/added/deprecated items"
-	@echo "  linkcheck to check all external links for integrity"
-
-clean:
-	-rm -rf build/*
-
-html:
-	mkdir -p build/html build/doctrees
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
-	@echo
-	@echo "Build finished. The HTML pages are in build/html."
-
-pickle:
-	mkdir -p build/pickle build/doctrees
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-web: pickle
-
-json:
-	mkdir -p build/json build/doctrees
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	mkdir -p build/htmlhelp build/doctrees
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in build/htmlhelp."
-
-latex:
-	mkdir -p build/latex build/doctrees
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in build/latex."
-	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
-	      "run these through (pdf)latex."
-
-changes:
-	mkdir -p build/changes build/doctrees
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
-	@echo
-	@echo "The overview file is in build/changes."
-
-linkcheck:
-	mkdir -p build/linkcheck build/doctrees
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in build/linkcheck/output.txt."
diff --git a/vendor/distribute-0.6.34/docs/_templates/indexsidebar.html b/vendor/distribute-0.6.34/docs/_templates/indexsidebar.html
deleted file mode 100644
index 932909f3e100eda84b017d1fbb38c81d488c1d44..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/_templates/indexsidebar.html
+++ /dev/null
@@ -1,8 +0,0 @@
-<h3>Download</h3> 
-
-<p>Current version: <b>{{ version }}</b></p> 
-<p>Get Distribute from the <a href="http://pypi.python.org/pypi/distribute">Python Package Index</a></p>
-
-<h3>Questions? Suggestions? Contributions?</h3>
-
-<p>Visit the <a href="http://bitbucket.org/tarek/distribute">Distribute project page</a> </p>
diff --git a/vendor/distribute-0.6.34/docs/_theme/nature/static/nature.css_t b/vendor/distribute-0.6.34/docs/_theme/nature/static/nature.css_t
deleted file mode 100644
index 1a654264d17b66f2098c1a74978c4e7bdfaf17fd..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/_theme/nature/static/nature.css_t
+++ /dev/null
@@ -1,237 +0,0 @@
-/**
- * Sphinx stylesheet -- default theme
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- */
- 
-@import url("basic.css");
- 
-/* -- page layout ----------------------------------------------------------- */
- 
-body {
-    font-family: Arial, sans-serif;
-    font-size: 100%;
-    background-color: #111111;
-    color: #555555;
-    margin: 0;
-    padding: 0;
-}
-
-div.documentwrapper {
-    float: left;
-    width: 100%;
-}
-
-div.bodywrapper {
-    margin: 0 0 0 300px;
-}
-
-hr{
-    border: 1px solid #B1B4B6;
-}
- 
-div.document {
-    background-color: #fafafa;
-}
- 
-div.body {
-    background-color: #ffffff;
-    color: #3E4349;
-    padding: 1em 30px 30px 30px;
-    font-size: 0.9em;
-}
- 
-div.footer {
-    color: #555;
-    width: 100%;
-    padding: 13px 0;
-    text-align: center;
-    font-size: 75%;
-}
- 
-div.footer a {
-    color: #444444;
-}
- 
-div.related {
-    background-color: #6BA81E;
-    line-height: 36px;
-    color: #ffffff;
-    text-shadow: 0px 1px 0 #444444;
-    font-size: 1.1em;
-}
- 
-div.related a {
-    color: #E2F3CC;
-}
-
-div.related .right {
-    font-size: 0.9em;
-}
-
-div.sphinxsidebar {
-    font-size: 0.9em;
-    line-height: 1.5em;
-    width: 300px;
-}
-
-div.sphinxsidebarwrapper{
-    padding: 20px 0;
-}
- 
-div.sphinxsidebar h3,
-div.sphinxsidebar h4 {
-    font-family: Arial, sans-serif;
-    color: #222222;
-    font-size: 1.2em;
-    font-weight: bold;
-    margin: 0;
-    padding: 5px 10px;
-    text-shadow: 1px 1px 0 white
-}
-
-div.sphinxsidebar h3 a {
-    color: #444444;
-}
-
-div.sphinxsidebar p {
-    color: #888888;
-    padding: 5px 20px;
-    margin: 0.5em 0px;
-}
- 
-div.sphinxsidebar p.topless {
-}
- 
-div.sphinxsidebar ul {
-    margin: 10px 10px 10px 20px;
-    padding: 0;
-    color: #000000;
-}
- 
-div.sphinxsidebar a {
-    color: #444444;
-}
-
-div.sphinxsidebar a:hover {
-    color: #E32E00;
-}
-
-div.sphinxsidebar input {
-    border: 1px solid #cccccc;
-    font-family: sans-serif;
-    font-size: 1.1em;
-    padding: 0.15em 0.3em;
-}
-
-div.sphinxsidebar input[type=text]{
-    margin-left: 20px;
-}
- 
-/* -- body styles ----------------------------------------------------------- */
- 
-a {
-    color: #005B81;
-    text-decoration: none;
-}
- 
-a:hover {
-    color: #E32E00;
-}
- 
-div.body h1,
-div.body h2,
-div.body h3,
-div.body h4,
-div.body h5,
-div.body h6 {
-    font-family: Arial, sans-serif;
-    font-weight: normal;
-    color: #212224;
-    margin: 30px 0px 10px 0px;
-    padding: 5px 0 5px 0px;
-    text-shadow: 0px 1px 0 white;
-    border-bottom: 1px solid #C8D5E3;
-}
- 
-div.body h1 { margin-top: 0; font-size: 200%; }
-div.body h2 { font-size: 150%; }
-div.body h3 { font-size: 120%; }
-div.body h4 { font-size: 110%; }
-div.body h5 { font-size: 100%; }
-div.body h6 { font-size: 100%; }
- 
-a.headerlink {
-    color: #c60f0f;
-    font-size: 0.8em;
-    padding: 0 4px 0 4px;
-    text-decoration: none;
-}
- 
-a.headerlink:hover {
-    background-color: #c60f0f;
-    color: white;
-}
- 
-div.body p, div.body dd, div.body li {
-    line-height: 1.8em;
-}
- 
-div.admonition p.admonition-title + p {
-    display: inline;
-}
-
-div.highlight{
-    background-color: white;
-}
-
-div.note {
-    background-color: #eeeeee;
-    border: 1px solid #cccccc;
-}
- 
-div.seealso {
-    background-color: #ffffcc;
-    border: 1px solid #ffff66;
-}
- 
-div.topic {
-    background-color: #fafafa;
-    border-width: 0;
-}
- 
-div.warning {
-    background-color: #ffe4e4;
-    border: 1px solid #ff6666;
-}
- 
-p.admonition-title {
-    display: inline;
-}
- 
-p.admonition-title:after {
-    content: ":";
-}
- 
-pre {
-    padding: 10px;
-    background-color: #fafafa;
-    color: #222222;
-    line-height: 1.5em;
-    font-size: 1.1em;
-    margin: 1.5em 0 1.5em 0;
-    -webkit-box-shadow: 0px 0px 4px #d8d8d8;
-    -moz-box-shadow: 0px 0px 4px #d8d8d8;
-    box-shadow: 0px 0px 4px #d8d8d8;
-}
- 
-tt {
-    color: #222222;
-    padding: 1px 2px;
-    font-size: 1.2em;
-    font-family: monospace;
-}
-
-#table-of-contents ul {
-    padding-left: 2em;
-}
-
diff --git a/vendor/distribute-0.6.34/docs/_theme/nature/static/pygments.css b/vendor/distribute-0.6.34/docs/_theme/nature/static/pygments.css
deleted file mode 100644
index 652b76128b6a174f3407a50fff8735896f47d863..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/_theme/nature/static/pygments.css
+++ /dev/null
@@ -1,54 +0,0 @@
-.c { color: #999988; font-style: italic } /* Comment */
-.k { font-weight: bold } /* Keyword */
-.o { font-weight: bold } /* Operator */
-.cm { color: #999988; font-style: italic } /* Comment.Multiline */
-.cp { color: #999999; font-weight: bold } /* Comment.preproc */
-.c1 { color: #999988; font-style: italic } /* Comment.Single */
-.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
-.ge { font-style: italic } /* Generic.Emph */
-.gr { color: #aa0000 } /* Generic.Error */
-.gh { color: #999999 } /* Generic.Heading */
-.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
-.go { color: #111 } /* Generic.Output */
-.gp { color: #555555 } /* Generic.Prompt */
-.gs { font-weight: bold } /* Generic.Strong */
-.gu { color: #aaaaaa } /* Generic.Subheading */
-.gt { color: #aa0000 } /* Generic.Traceback */
-.kc { font-weight: bold } /* Keyword.Constant */
-.kd { font-weight: bold } /* Keyword.Declaration */
-.kp { font-weight: bold } /* Keyword.Pseudo */
-.kr { font-weight: bold } /* Keyword.Reserved */
-.kt { color: #445588; font-weight: bold } /* Keyword.Type */
-.m { color: #009999 } /* Literal.Number */
-.s { color: #bb8844 } /* Literal.String */
-.na { color: #008080 } /* Name.Attribute */
-.nb { color: #999999 } /* Name.Builtin */
-.nc { color: #445588; font-weight: bold } /* Name.Class */
-.no { color: #ff99ff } /* Name.Constant */
-.ni { color: #800080 } /* Name.Entity */
-.ne { color: #990000; font-weight: bold } /* Name.Exception */
-.nf { color: #990000; font-weight: bold } /* Name.Function */
-.nn { color: #555555 } /* Name.Namespace */
-.nt { color: #000080 } /* Name.Tag */
-.nv { color: purple } /* Name.Variable */
-.ow { font-weight: bold } /* Operator.Word */
-.mf { color: #009999 } /* Literal.Number.Float */
-.mh { color: #009999 } /* Literal.Number.Hex */
-.mi { color: #009999 } /* Literal.Number.Integer */
-.mo { color: #009999 } /* Literal.Number.Oct */
-.sb { color: #bb8844 } /* Literal.String.Backtick */
-.sc { color: #bb8844 } /* Literal.String.Char */
-.sd { color: #bb8844 } /* Literal.String.Doc */
-.s2 { color: #bb8844 } /* Literal.String.Double */
-.se { color: #bb8844 } /* Literal.String.Escape */
-.sh { color: #bb8844 } /* Literal.String.Heredoc */
-.si { color: #bb8844 } /* Literal.String.Interpol */
-.sx { color: #bb8844 } /* Literal.String.Other */
-.sr { color: #808000 } /* Literal.String.Regex */
-.s1 { color: #bb8844 } /* Literal.String.Single */
-.ss { color: #bb8844 } /* Literal.String.Symbol */
-.bp { color: #999999 } /* Name.Builtin.Pseudo */
-.vc { color: #ff99ff } /* Name.Variable.Class */
-.vg { color: #ff99ff } /* Name.Variable.Global */
-.vi { color: #ff99ff } /* Name.Variable.Instance */
-.il { color: #009999 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/docs/_theme/nature/theme.conf b/vendor/distribute-0.6.34/docs/_theme/nature/theme.conf
deleted file mode 100644
index 1cc40044646bb73870088ddc88543c58a3ca083e..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/_theme/nature/theme.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[theme]
-inherit = basic
-stylesheet = nature.css
-pygments_style = tango
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/easy_install.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/easy_install.txt
deleted file mode 100644
index 9b4fcfbb6e673e9c97de8503d0deeab602f1594c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/easy_install.txt
+++ /dev/null
@@ -1,1597 +0,0 @@
-============
-Easy Install
-============
-
-Easy Install is a python module (``easy_install``) bundled with ``setuptools``
-that lets you automatically download, build, install, and manage Python
-packages.
-
-Please share your experiences with us! If you encounter difficulty installing
-a package, please contact us via the `distutils mailing list
-<http://mail.python.org/pipermail/distutils-sig/>`_.  (Note: please DO NOT send
-private email directly to the author of setuptools; it will be discarded.  The
-mailing list is a searchable archive of previously-asked and answered
-questions; you should begin your research there before reporting something as a
-bug -- and then do so via list discussion first.)
-
-(Also, if you'd like to learn about how you can use ``setuptools`` to make your
-own packages work better with EasyInstall, or provide EasyInstall-like features
-without requiring your users to use EasyInstall directly, you'll probably want
-to check out the full `setuptools`_ documentation as well.)
-
-.. contents:: **Table of Contents**
-
-
-Using "Easy Install"
-====================
-
-
-.. _installation instructions:
-
-Installing "Easy Install"
--------------------------
-
-Please see the `setuptools PyPI page <http://pypi.python.org/pypi/setuptools>`_
-for download links and basic installation instructions for each of the
-supported platforms.
-
-You will need at least Python 2.3.5, or if you are on a 64-bit platform, Python
-2.4.  An ``easy_install`` script will be installed in the normal location for
-Python scripts on your platform.
-
-Note that the instructions on the setuptools PyPI page assume that you are
-installing to Python's primary ``site-packages`` directory.  If this is
-not the case, you should consult the section below on `Custom Installation
-Locations`_ before installing.  (And, on Windows, you should not use the
-``.exe`` installer when installing to an alternate location.)
-
-Note that ``easy_install`` normally works by downloading files from the
-internet.  If you are behind an NTLM-based firewall that prevents Python
-programs from accessing the net directly, you may wish to first install and use
-the `APS proxy server <http://ntlmaps.sf.net/>`_, which lets you get past such
-firewalls in the same way that your web browser(s) do.
-
-(Alternately, if you do not wish easy_install to actually download anything, you
-can restrict it from doing so with the ``--allow-hosts`` option; see the
-sections on `restricting downloads with --allow-hosts`_ and `command-line
-options`_ for more details.)
-
-
-Troubleshooting
-~~~~~~~~~~~~~~~
-
-If EasyInstall/setuptools appears to install correctly, and you can run the
-``easy_install`` command but it fails with an ``ImportError``, the most likely
-cause is that you installed to a location other than ``site-packages``,
-without taking any of the steps described in the `Custom Installation
-Locations`_ section below.  Please see that section and follow the steps to
-make sure that your custom location will work correctly.  Then re-install.
-
-Similarly, if you can run ``easy_install``, and it appears to be installing
-packages, but then you can't import them, the most likely issue is that you
-installed EasyInstall correctly but are using it to install packages to a
-non-standard location that hasn't been properly prepared.  Again, see the
-section on `Custom Installation Locations`_ for more details.
-
-
-Windows Notes
-~~~~~~~~~~~~~
-
-On Windows, an ``easy_install.exe`` launcher will also be installed, so that
-you can just type ``easy_install`` as long as it's on your ``PATH``.  If typing
-``easy_install`` at the command prompt doesn't work, check to make sure your
-``PATH`` includes the appropriate ``C:\\Python2X\\Scripts`` directory.  On
-most current versions of Windows, you can change the ``PATH`` by right-clicking
-"My Computer", choosing "Properties" and selecting the "Advanced" tab, then
-clicking the "Environment Variables" button.  ``PATH`` will be in the "System
-Variables" section, and you will need to exit and restart your command shell
-(command.com, cmd.exe, bash, or other) for the change to take effect.  Be sure
-to add a ``;`` after the last item on ``PATH`` before adding the scripts
-directory to it.
-
-Note that instead of changing your ``PATH`` to include the Python scripts
-directory, you can also retarget the installation location for scripts so they
-go in a directory that's already on the ``PATH``.  For more information see the
-sections below on `Command-Line Options`_ and `Configuration Files`_.  You
-can pass command line options (such as ``--script-dir``) to
-``distribute_setup.py`` to control where ``easy_install.exe`` will be installed.
-
-
-
-Downloading and Installing a Package
-------------------------------------
-
-For basic use of ``easy_install``, you need only supply the filename or URL of
-a source distribution or .egg file (`Python Egg`__).
-
-__ http://peak.telecommunity.com/DevCenter/PythonEggs
-
-**Example 1**. Install a package by name, searching PyPI for the latest
-version, and automatically downloading, building, and installing it::
-
-    easy_install SQLObject
-
-**Example 2**. Install or upgrade a package by name and version by finding
-links on a given "download page"::
-
-    easy_install -f http://pythonpaste.org/package_index.html SQLObject
-
-**Example 3**. Download a source distribution from a specified URL,
-automatically building and installing it::
-
-    easy_install http://example.com/path/to/MyPackage-1.2.3.tgz
-
-**Example 4**. Install an already-downloaded .egg file::
-
-    easy_install /my_downloads/OtherPackage-3.2.1-py2.3.egg
-
-**Example 5**.  Upgrade an already-installed package to the latest version
-listed on PyPI::
-
-    easy_install --upgrade PyProtocols
-
-**Example 6**.  Install a source distribution that's already downloaded and
-extracted in the current directory (New in 0.5a9)::
-
-    easy_install .
-
-**Example 7**.  (New in 0.6a1) Find a source distribution or Subversion
-checkout URL for a package, and extract it or check it out to
-``~/projects/sqlobject`` (the name will always be in all-lowercase), where it
-can be examined or edited.  (The package will not be installed, but it can
-easily be installed with ``easy_install ~/projects/sqlobject``.  See `Editing
-and Viewing Source Packages`_ below for more info.)::
-
-    easy_install --editable --build-directory ~/projects SQLObject
-
-**Example 8**. (New in 0.6.11) Install a distribution within your home dir::
-
-    easy_install --user SQLAlchemy
-
-Easy Install accepts URLs, filenames, PyPI package names (i.e., ``distutils``
-"distribution" names), and package+version specifiers.  In each case, it will
-attempt to locate the latest available version that meets your criteria.
-
-When downloading or processing downloaded files, Easy Install recognizes
-distutils source distribution files with extensions of .tgz, .tar, .tar.gz,
-.tar.bz2, or .zip.  And of course it handles already-built .egg
-distributions as well as ``.win32.exe`` installers built using distutils.
-
-By default, packages are installed to the running Python installation's
-``site-packages`` directory, unless you provide the ``-d`` or ``--install-dir``
-option to specify an alternative directory, or specify an alternate location
-using distutils configuration files.  (See `Configuration Files`_, below.)
-
-By default, any scripts included with the package are installed to the running
-Python installation's standard script installation location.  However, if you
-specify an installation directory via the command line or a config file, then
-the default directory for installing scripts will be the same as the package
-installation directory, to ensure that the script will have access to the
-installed package.  You can override this using the ``-s`` or ``--script-dir``
-option.
-
-Installed packages are added to an ``easy-install.pth`` file in the install
-directory, so that Python will always use the most-recently-installed version
-of the package.  If you would like to be able to select which version to use at
-runtime, you should use the ``-m`` or ``--multi-version`` option.
-
-
-Upgrading a Package
--------------------
-
-You don't need to do anything special to upgrade a package: just install the
-new version, either by requesting a specific version, e.g.::
-
-    easy_install "SomePackage==2.0"
-
-a version greater than the one you have now::
-
-    easy_install "SomePackage>2.0"
-
-using the upgrade flag, to find the latest available version on PyPI::
-
-    easy_install --upgrade SomePackage
-
-or by using a download page, direct download URL, or package filename::
-
-    easy_install -f http://example.com/downloads ExamplePackage
-
-    easy_install http://example.com/downloads/ExamplePackage-2.0-py2.4.egg
-
-    easy_install my_downloads/ExamplePackage-2.0.tgz
-
-If you're using ``-m`` or ``--multi-version`` , using the ``require()``
-function at runtime automatically selects the newest installed version of a
-package that meets your version criteria.  So, installing a newer version is
-the only step needed to upgrade such packages.
-
-If you're installing to a directory on PYTHONPATH, or a configured "site"
-directory (and not using ``-m``), installing a package automatically replaces
-any previous version in the ``easy-install.pth`` file, so that Python will
-import the most-recently installed version by default.  So, again, installing
-the newer version is the only upgrade step needed.
-
-If you haven't suppressed script installation (using ``--exclude-scripts`` or
-``-x``), then the upgraded version's scripts will be installed, and they will
-be automatically patched to ``require()`` the corresponding version of the
-package, so that you can use them even if they are installed in multi-version
-mode.
-
-``easy_install`` never actually deletes packages (unless you're installing a
-package with the same name and version number as an existing package), so if
-you want to get rid of older versions of a package, please see `Uninstalling
-Packages`_, below.
-
-
-Changing the Active Version
----------------------------
-
-If you've upgraded a package, but need to revert to a previously-installed
-version, you can do so like this::
-
-    easy_install PackageName==1.2.3
-
-Where ``1.2.3`` is replaced by the exact version number you wish to switch to.
-If a package matching the requested name and version is not already installed
-in a directory on ``sys.path``, it will be located via PyPI and installed.
-
-If you'd like to switch to the latest installed version of ``PackageName``, you
-can do so like this::
-
-    easy_install PackageName
-
-This will activate the latest installed version.  (Note: if you have set any
-``find_links`` via distutils configuration files, those download pages will be
-checked for the latest available version of the package, and it will be
-downloaded and installed if it is newer than your current version.)
-
-Note that changing the active version of a package will install the newly
-active version's scripts, unless the ``--exclude-scripts`` or ``-x`` option is
-specified.
-
-
-Uninstalling Packages
----------------------
-
-If you have replaced a package with another version, then you can just delete
-the package(s) you don't need by deleting the PackageName-versioninfo.egg file
-or directory (found in the installation directory).
-
-If you want to delete the currently installed version of a package (or all
-versions of a package), you should first run::
-
-    easy_install -m PackageName
-
-This will ensure that Python doesn't continue to search for a package you're
-planning to remove. After you've done this, you can safely delete the .egg
-files or directories, along with any scripts you wish to remove.
-
-
-Managing Scripts
-----------------
-
-Whenever you install, upgrade, or change versions of a package, EasyInstall
-automatically installs the scripts for the selected package version, unless
-you tell it not to with ``-x`` or ``--exclude-scripts``.  If any scripts in
-the script directory have the same name, they are overwritten.
-
-Thus, you do not normally need to manually delete scripts for older versions of
-a package, unless the newer version of the package does not include a script
-of the same name.  However, if you are completely uninstalling a package, you
-may wish to manually delete its scripts.
-
-EasyInstall's default behavior means that you can normally only run scripts
-from one version of a package at a time.  If you want to keep multiple versions
-of a script available, however, you can simply use the ``--multi-version`` or
-``-m`` option, and rename the scripts that EasyInstall creates.  This works
-because EasyInstall installs scripts as short code stubs that ``require()`` the
-matching version of the package the script came from, so renaming the script
-has no effect on what it executes.
-
-For example, suppose you want to use two versions of the ``rst2html`` tool
-provided by the `docutils <http://docutils.sf.net/>`_ package.  You might
-first install one version::
-
-    easy_install -m docutils==0.3.9
-
-then rename the ``rst2html.py`` to ``r2h_039``, and install another version::
-
-    easy_install -m docutils==0.3.10
-
-This will create another ``rst2html.py`` script, this one using docutils
-version 0.3.10 instead of 0.3.9.  You now have two scripts, each using a
-different version of the package.  (Notice that we used ``-m`` for both
-installations, so that Python won't lock us out of using anything but the most
-recently-installed version of the package.)
-
-
-
-Tips & Techniques
------------------
-
-
-Multiple Python Versions
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-As of version 0.6a11, EasyInstall installs itself under two names:
-``easy_install`` and ``easy_install-N.N``, where ``N.N`` is the Python version
-used to install it.  Thus, if you install EasyInstall for both Python 2.3 and
-2.4, you can use the ``easy_install-2.3`` or ``easy_install-2.4`` scripts to
-install packages for Python 2.3 or 2.4, respectively.
-
-Also, if you're working with Python version 2.4 or higher, you can run Python
-with ``-m easy_install`` to run that particular Python version's
-``easy_install`` command.
-
-
-Restricting Downloads with ``--allow-hosts``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can use the ``--allow-hosts`` (``-H``) option to restrict what domains
-EasyInstall will look for links and downloads on.  ``--allow-hosts=None``
-prevents downloading altogether.  You can also use wildcards, for example
-to restrict downloading to hosts in your own intranet.  See the section below
-on `Command-Line Options`_ for more details on the ``--allow-hosts`` option.
-
-By default, there are no host restrictions in effect, but you can change this
-default by editing the appropriate `configuration files`_ and adding:
-
-.. code-block:: ini
-
-    [easy_install]
-    allow_hosts = *.myintranet.example.com,*.python.org
-
-The above example would then allow downloads only from hosts in the
-``python.org`` and ``myintranet.example.com`` domains, unless overridden on the
-command line.
-
-
-Installing on Un-networked Machines
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Just copy the eggs or source packages you need to a directory on the target
-machine, then use the ``-f`` or ``--find-links`` option to specify that
-directory's location.  For example::
-
-    easy_install -H None -f somedir SomePackage
-
-will attempt to install SomePackage using only eggs and source packages found
-in ``somedir`` and disallowing all remote access.  You should of course make
-sure you have all of SomePackage's dependencies available in somedir.
-
-If you have another machine of the same operating system and library versions
-(or if the packages aren't platform-specific), you can create the directory of
-eggs using a command like this::
-
-    easy_install -zmaxd somedir SomePackage
-
-This will tell EasyInstall to put zipped eggs or source packages for
-SomePackage and all its dependencies into ``somedir``, without creating any
-scripts or .pth files.  You can then copy the contents of ``somedir`` to the
-target machine.  (``-z`` means zipped eggs, ``-m`` means multi-version, which
-prevents .pth files from being used, ``-a`` means to copy all the eggs needed,
-even if they're installed elsewhere on the machine, and ``-d`` indicates the
-directory to place the eggs in.)
-
-You can also build the eggs from local development packages that were installed
-with the ``setup.py develop`` command, by including the ``-l`` option, e.g.::
-
-    easy_install -zmaxld somedir SomePackage
-
-This will use locally-available source distributions to build the eggs.
-
-
-Packaging Others' Projects As Eggs
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Need to distribute a package that isn't published in egg form?  You can use
-EasyInstall to build eggs for a project.  You'll want to use the ``--zip-ok``,
-``--exclude-scripts``, and possibly ``--no-deps`` options (``-z``, ``-x`` and
-``-N``, respectively).  Use ``-d`` or ``--install-dir`` to specify the location
-where you'd like the eggs placed.  By placing them in a directory that is
-published to the web, you can then make the eggs available for download, either
-in an intranet or to the internet at large.
-
-If someone distributes a package in the form of a single ``.py`` file, you can
-wrap it in an egg by tacking an ``#egg=name-version`` suffix on the file's URL.
-So, something like this::
-
-    easy_install -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
-
-will install the package as an egg, and this::
-
-    easy_install -zmaxd. \
-        -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
-
-will create a ``.egg`` file in the current directory.
-
-
-Creating your own Package Index
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In addition to local directories and the Python Package Index, EasyInstall can
-find download links on most any web page whose URL is given to the ``-f``
-(``--find-links``) option.  In the simplest case, you can simply have a web
-page with links to eggs or Python source packages, even an automatically
-generated directory listing (such as the Apache web server provides).
-
-If you are setting up an intranet site for package downloads, you may want to
-configure the target machines to use your download site by default, adding
-something like this to their `configuration files`_:
-
-.. code-block:: ini
-
-    [easy_install]
-    find_links = http://mypackages.example.com/somedir/
-                 http://turbogears.org/download/
-                 http://peak.telecommunity.com/dist/
-
-As you can see, you can list multiple URLs separated by whitespace, continuing
-on multiple lines if necessary (as long as the subsequent lines are indented).
-
-If you are more ambitious, you can also create an entirely custom package index
-or PyPI mirror.  See the ``--index-url`` option under `Command-Line Options`_,
-below, and also the section on `Package Index "API"`_.
-
-
-Password-Protected Sites
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-If a site you want to download from is password-protected using HTTP "Basic"
-authentication, you can specify your credentials in the URL, like so::
-
-    http://some_userid:some_password@some.example.com/some_path/
-
-You can do this with both index page URLs and direct download URLs.  As long
-as any HTML pages read by easy_install use *relative* links to point to the
-downloads, the same user ID and password will be used to do the downloading.
-
-
-Controlling Build Options
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall respects standard distutils `Configuration Files`_, so you can use
-them to configure build options for packages that it installs from source.  For
-example, if you are on Windows using the MinGW compiler, you can configure the
-default compiler by putting something like this:
-
-.. code-block:: ini
-
-    [build]
-    compiler = mingw32
-
-into the appropriate distutils configuration file.  In fact, since this is just
-normal distutils configuration, it will affect any builds using that config
-file, not just ones done by EasyInstall.  For example, if you add those lines
-to ``distutils.cfg`` in the ``distutils`` package directory, it will be the
-default compiler for *all* packages you build.  See `Configuration Files`_
-below for a list of the standard configuration file locations, and links to
-more documentation on using distutils configuration files.
-
-
-Editing and Viewing Source Packages
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sometimes a package's source distribution contains additional documentation,
-examples, configuration files, etc., that are not part of its actual code.  If
-you want to be able to examine these files, you can use the ``--editable``
-option to EasyInstall, and EasyInstall will look for a source distribution
-or Subversion URL for the package, then download and extract it or check it out
-as a subdirectory of the ``--build-directory`` you specify.  If you then wish
-to install the package after editing or configuring it, you can do so by
-rerunning EasyInstall with that directory as the target.
-
-Note that using ``--editable`` stops EasyInstall from actually building or
-installing the package; it just finds, obtains, and possibly unpacks it for
-you.  This allows you to make changes to the package if necessary, and to
-either install it in development mode using ``setup.py develop`` (if the
-package uses setuptools, that is), or install it by running
-``easy_install projectdir`` (where ``projectdir`` is the subdirectory
-EasyInstall created for the downloaded package).
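-
-For example, a typical sequence might look like this (the project name is
-purely illustrative)::
-
-    easy_install -eb ~/projects SomePackage
-    # ...edit or review the files in ~/projects/somepackage, then...
-    easy_install ~/projects/somepackage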
-
-In order to use ``--editable`` (``-e`` for short), you *must* also supply a
-``--build-directory`` (``-b`` for short).  The project will be placed in a
-subdirectory of the build directory.  The subdirectory will have the same
-name as the project itself, but in all-lowercase.  If a file or directory of
-that name already exists, EasyInstall will print an error message and exit.
-
-Also, when using ``--editable``, you cannot use URLs or filenames as arguments.
-You *must* specify project names (and optional version requirements) so that
-EasyInstall knows what directory name(s) to create.  If you need to force
-EasyInstall to use a particular URL or filename, you should specify it as a
-``--find-links`` item (``-f`` for short), and then also specify
-the project name, e.g.::
-
-    easy_install -eb ~/projects \
-     -fhttp://prdownloads.sourceforge.net/ctypes/ctypes-0.9.6.tar.gz?download \
-     ctypes==0.9.6
-
-
-Dealing with Installation Conflicts
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-(NOTE: As of 0.6a11, this section is obsolete; it is retained here only so that
-people using older versions of EasyInstall can consult it.  As of version
-0.6a11, installation conflicts are handled automatically without deleting the
-old or system-installed packages, and without ignoring the issue.  Instead,
-eggs are automatically shifted to the front of ``sys.path`` using special
-code added to the ``easy-install.pth`` file.  So, if you are using version
-0.6a11 or better of setuptools, you do not need to worry about conflicts,
-and the following issues do not apply to you.)
-
-EasyInstall installs distributions in a "managed" way, such that each
-distribution can be independently activated or deactivated on ``sys.path``.
-However, packages that were not installed by EasyInstall are "unmanaged",
-in that they usually live all in one directory and cannot be independently
-activated or deactivated.
-
-As a result, if you are using EasyInstall to upgrade an existing package, or
-to install a package with the same name as an existing package, EasyInstall
-will warn you of the conflict.  (This is an improvement over ``setup.py
-install``, because the ``distutils`` just install new packages on top of old
-ones, possibly combining two unrelated packages or leaving behind modules that
-have been deleted in the newer version of the package.)
-
-By default, EasyInstall will stop the installation if it detects a conflict
-between an existing, "unmanaged" package, and a module or package in any of
-the distributions you're installing.  It will display a list of all of the
-existing files and directories that would need to be deleted for the new
-package to be able to function correctly.  You can then either delete these
-conflicting files and directories yourself and re-run EasyInstall, or you can
-just use the ``--delete-conflicting`` or ``--ignore-conflicts-at-my-risk``
-options, as described under `Command-Line Options`_, below.
-
-Of course, once you've replaced all of your existing "unmanaged" packages with
-versions managed by EasyInstall, you won't have any more conflicts to worry
-about!
-
-
-Compressed Installation
-~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall tries to install packages in zipped form, if it can.  Zipping
-packages can improve Python's overall import performance if you're not using
-the ``--multi-version`` option, because Python processes zipfile entries on
-``sys.path`` much faster than it does directories.
-
-As of version 0.5a9, EasyInstall analyzes packages to determine whether they
-can be safely installed as a zipfile, and then acts on its analysis.  (Previous
-versions would not install a package as a zipfile unless you used the
-``--zip-ok`` option.)
-
-The current analysis approach is fairly conservative; it currently looks for:
-
- * Any use of the ``__file__`` or ``__path__`` variables (which should be
-   replaced with ``pkg_resources`` API calls)
-
- * Possible use of ``inspect`` functions that expect to manipulate source files
-   (e.g. ``inspect.getsource()``)
-
- * Top-level modules that might be scripts used with ``python -m`` (Python 2.4)
-
-If any of the above are found in the package being installed, EasyInstall will
-assume that the package cannot be safely run from a zipfile, and unzip it to
-a directory instead.  You can override this analysis with the ``--zip-ok`` flag,
-which will tell EasyInstall to install the package as a zipfile anyway.  Or,
-you can use the ``--always-unzip`` flag, in which case EasyInstall will always
-unzip, even if its analysis says the package is safe to run as a zipfile.
-
-Normally, however, it is simplest to let EasyInstall handle the determination
-of whether to zip or unzip, and only specify overrides when needed to work
-around a problem.  If you find you need to override EasyInstall's guesses, you
-may want to contact the package author and the EasyInstall maintainers, so that
-they can make appropriate changes in future versions.
-
-(Note: If a package uses ``setuptools`` in its setup script, the package author
-has the option to declare the package safe or unsafe for zipped usage via the
-``zip_safe`` argument to ``setup()``.  If the package author makes such a
-declaration, EasyInstall believes the package's author and does not perform its
-own analysis.  However, your command-line option, if any, will still override
-the package author's choice.)
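-
-As a rough sketch, a package author using ``setuptools`` could make such a
-declaration in ``setup.py`` like this (the project name and version are just
-examples)::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name="SomePackage",
-        version="1.0",
-        packages=find_packages(),
-        # Tell EasyInstall not to install this project as a zipfile:
-        zip_safe=False,
-    )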
-
-
-Reference Manual
-================
-
-Configuration Files
--------------------
-
-(New in 0.4a2)
-
-You may specify default options for EasyInstall using the standard
-distutils configuration files, under the command heading ``easy_install``.
-EasyInstall will look first for a ``setup.cfg`` file in the current directory,
-then a ``~/.pydistutils.cfg`` or ``$HOME\\pydistutils.cfg`` (on Unix-like OSes
-and Windows, respectively), and finally a ``distutils.cfg`` file in the
-``distutils`` package directory.  Here's a simple example:
-
-.. code-block:: ini
-
-    [easy_install]
-
-    # set the default location to install packages
-    install_dir = /home/me/lib/python
-
-    # Notice that indentation can be used to continue an option
-    # value; this is especially useful for the "--find-links"
-    # option, which tells easy_install to use download links on
-    # these pages before consulting PyPI:
-    #
-    find_links = http://sqlobject.org/
-                 http://peak.telecommunity.com/dist/
-
-In addition to accepting configuration for its own options under
-``[easy_install]``, EasyInstall also respects defaults specified for other
-distutils commands.  For example, if you don't set an ``install_dir`` for
-``[easy_install]``, but *have* set an ``install_lib`` for the ``[install]``
-command, this will become EasyInstall's default installation directory.  Thus,
-if you are already using distutils configuration files to set default install
-locations, build options, etc., EasyInstall will respect your existing settings
-until and unless you override them explicitly in an ``[easy_install]`` section.
-
-For more information, see also the current Python documentation on the `use and
-location of distutils configuration files <http://docs.python.org/inst/config-syntax.html>`_.
-
-Notice that ``easy_install`` will use the ``setup.cfg`` from the current
-working directory only if it was triggered from ``setup.py`` through the
-``install_requires`` option. The standalone command will not use that file.
-
-Command-Line Options
---------------------
-
-``--zip-ok, -z``
-    Install all packages as zip files, even if they are marked as unsafe for
-    running as a zipfile.  This can be useful when EasyInstall's analysis
-    of a non-setuptools package is too conservative, but keep in mind that
-    the package may not work correctly.  (Changed in 0.5a9; previously this
-    option was required in order for zipped installation to happen at all.)
-
-``--always-unzip, -Z``
-    Don't install any packages as zip files, even if the packages are marked
-    as safe for running as a zipfile.  This can be useful if a package does
-    something unsafe, but not in a way that EasyInstall can easily detect.
-    EasyInstall's default analysis is currently very conservative, however, so
-    you should only use this option if you've had problems with a particular
-    package, and *after* reporting the problem to the package's maintainer and
-    to the EasyInstall maintainers.
-
-    (Note: the ``-z/-Z`` options only affect the installation of newly-built
-    or downloaded packages that are not already installed in the target
-    directory; if you want to convert an existing installed version from
-    zipped to unzipped or vice versa, you'll need to delete the existing
-    version first, and re-run EasyInstall.)
-
-``--multi-version, -m``
-    "Multi-version" mode. Specifying this option prevents ``easy_install`` from
-    adding an ``easy-install.pth`` entry for the package being installed, and
-    if an entry for any version of the package already exists, it will be removed
-    upon successful installation. In multi-version mode, no specific version of
-    the package is available for importing, unless you use
-    ``pkg_resources.require()`` to put it on ``sys.path``. This can be as
-    simple as::
-
-        from pkg_resources import require
-        require("SomePackage", "OtherPackage", "MyPackage")
-
-    which will put the latest installed version of the specified packages on
-    ``sys.path`` for you. (For more advanced uses, like selecting specific
-    versions and enabling optional dependencies, see the ``pkg_resources`` API
-    doc.)
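-
-    If you need a particular version, a version specifier can be included in
-    the requirement string, for example (the package name is hypothetical)::
-
-        require("SomePackage>=1.2,<2.0")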
-
-    Changed in 0.6a10: this option is no longer silently enabled when
-    installing to a non-PYTHONPATH, non-"site" directory.  You must always
-    explicitly use this option if you want it to be active.
-
-``--upgrade, -U``   (New in 0.5a4)
-    By default, EasyInstall only searches online if a project/version
-    requirement can't be met by distributions already installed
-    on sys.path or the installation directory.  However, if you supply the
-    ``--upgrade`` or ``-U`` flag, EasyInstall will always check the package
-    index and ``--find-links`` URLs before selecting a version to install.  In
-    this way, you can force EasyInstall to use the latest available version of
-    any package it installs (subject to any version requirements that might
-    exclude such later versions).
-
-``--install-dir=DIR, -d DIR``
-    Set the installation directory. It is up to you to ensure that this
-    directory is on ``sys.path`` at runtime, and to use
-    ``pkg_resources.require()`` to enable the installed package(s) that you
-    need.
-
-    (New in 0.4a2) If this option is not directly specified on the command line
-    or in a distutils configuration file, the distutils default installation
-    location is used.  Normally, this would be the ``site-packages`` directory,
-    but if you are using distutils configuration files, setting things like
-    ``prefix`` or ``install_lib``, then those settings are taken into
-    account when computing the default installation directory, as is the
-    ``--prefix`` option.
-
-``--script-dir=DIR, -s DIR``
-    Set the script installation directory.  If you don't supply this option
-    (via the command line or a configuration file), but you *have* supplied
-    an ``--install-dir`` (via command line or config file), then this option
-    defaults to the same directory, so that the scripts will be able to find
-    their associated package installation.  Otherwise, this setting defaults
-    to the location where the distutils would normally install scripts, taking
-    any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
-    Don't install scripts.  This is useful if you need to install multiple
-    versions of a package, but do not want to reset the version that will be
-    run by scripts that are already installed.
-
-``--user`` (New in 0.6.11)
-    Use the user site-packages directory as specified in :pep:`370`
-    instead of the global site-packages.
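-
-    For example (the package name is purely illustrative)::
-
-        easy_install --user SomePackage
-
-    This installs to the per-user location described by ``site.USER_BASE``;
-    see `Custom Installation Locations`_ below for details.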
-
-``--always-copy, -a``   (New in 0.5a4)
-    Copy all needed distributions to the installation directory, even if they
-    are already present in a directory on sys.path.  In older versions of
-    EasyInstall, this was the default behavior, but now you must explicitly
-    request it.  By default, EasyInstall will no longer copy such distributions
-    from other sys.path directories to the installation directory, unless you
-    explicitly gave the distribution's filename on the command line.
-
-    Note that as of 0.6a10, using this option excludes "system" and
-    "development" eggs from consideration because they can't be reliably
-    copied.  This may cause EasyInstall to choose an older version of a package
-    than what you expected, or it may cause downloading and installation of a
-    fresh copy of something that's already installed.  You will see warning
-    messages for any eggs that EasyInstall skips, before it falls back to an
-    older version or attempts to download a fresh copy.
-
-``--find-links=URLS_OR_FILENAMES, -f URLS_OR_FILENAMES``
-    Scan the specified "download pages" or directories for direct links to eggs
-    or other distributions.  Any existing file or directory names or direct
-    download URLs are immediately added to EasyInstall's search cache, and any
-    indirect URLs (ones that don't point to eggs or other recognized archive
-    formats) are added to a list of additional places to search for download
-    links.  As soon as EasyInstall has to go online to find a package (either
-    because it doesn't exist locally, or because ``--upgrade`` or ``-U`` was
-    used), the specified URLs will be downloaded and scanned for additional
-    direct links.
-
-    Eggs and archives found by way of ``--find-links`` are only downloaded if
-    they are needed to meet a requirement specified on the command line; links
-    to unneeded packages are ignored.
-
-    If all requested packages can be found using links on the specified
-    download pages, the Python Package Index will not be consulted unless you
-    also specified the ``--upgrade`` or ``-U`` option.
-
-    (Note: if you want to refer to a local HTML file containing links, you must
-    use a ``file:`` URL, as filenames that do not refer to a directory, egg, or
-    archive are ignored.)
-
-    You may specify multiple URLs or file/directory names with this option,
-    separated by whitespace.  Note that on the command line, you will probably
-    have to surround the URL list with quotes, so that it is recognized as a
-    single option value.  You can also specify URLs in a configuration file;
-    see `Configuration Files`_, above.
-
-    Changed in 0.6a10: previously all URLs and directories passed to this
-    option were scanned as early as possible, but from 0.6a10 on, only
-    directories and direct archive links are scanned immediately; URLs are not
-    retrieved unless a package search was already going to go online due to a
-    package not being available locally, or due to the use of the ``--upgrade``
-    or ``-U`` option.
-
-``--no-find-links`` (New in Distribute 0.6.11)
-    Block the addition of any links.  This is useful if you want to avoid
-    adding links defined in a project easy_install is installing (whether
-    it's a requested project or a dependency).  When used, ``--find-links``
-    is ignored.
-
-``--delete-conflicting, -D`` (Removed in 0.6a11)
-    (As of 0.6a11, this option is no longer necessary; please do not use it!)
-
-    If you are replacing a package that was previously installed *without*
-    using EasyInstall, the old version may end up on ``sys.path`` before the
-    version being installed with EasyInstall.  EasyInstall will normally abort
-    the installation of a package if it detects such a conflict, and ask you to
-    manually remove the conflicting files or directories.  If you specify this
-    option, however, EasyInstall will attempt to delete the files or
-    directories itself, and then proceed with the installation.
-
-``--ignore-conflicts-at-my-risk`` (Removed in 0.6a11)
-    (As of 0.6a11, this option is no longer necessary; please do not use it!)
-
-    Ignore conflicting packages and proceed with installation anyway, even
-    though it means the package probably won't work properly.  If the
-    conflicting package is in a directory you can't write to, this may be your
-    only option, but you will need to take more invasive measures to get the
-    installed package to work, like manually adding it to ``PYTHONPATH`` or to
-    ``sys.path`` at runtime.
-
-``--index-url=URL, -i URL`` (New in 0.4a1; default changed in 0.6c7)
-    Specifies the base URL of the Python Package Index.  The default is
-    http://pypi.python.org/simple if not specified.  When a package is requested
-    that is not locally available or linked from a ``--find-links`` download
-    page, the package index will be searched for download pages for the needed
-    package, and those download pages will be searched for links to download
-    an egg or source distribution.
-
-``--editable, -e`` (New in 0.6a1)
-    Only find and download source distributions for the specified projects,
-    unpacking them to subdirectories of the specified ``--build-directory``.
-    EasyInstall will not actually build or install the requested projects or
-    their dependencies; it will just find and extract them for you.  See
-    `Editing and Viewing Source Packages`_ above for more details.
-
-``--build-directory=DIR, -b DIR`` (UPDATED in 0.6a1)
-    Set the directory used to build source packages.  If a package is built
-    from a source distribution or checkout, it will be extracted to a
-    subdirectory of the specified directory.  The subdirectory will have the
-    same name as the extracted distribution's project, but in all-lowercase.
-    If a file or directory of that name already exists in the given directory,
-    a warning will be printed to the console, and the build will take place in
-    a temporary directory instead.
-
-    This option is most useful in combination with the ``--editable`` option,
-    which forces EasyInstall to *only* find and extract (but not build and
-    install) source distributions.  See `Editing and Viewing Source Packages`_,
-    above, for more information.
-
-``--verbose, -v, --quiet, -q`` (New in 0.4a4)
-    Control the level of detail of EasyInstall's progress messages.  The
-    default detail level is "info", which prints information only about
-    relatively time-consuming operations like running a setup script, unpacking
-    an archive, or retrieving a URL.  Using ``-q`` or ``--quiet`` drops the
-    detail level to "warn", which will only display installation reports,
-    warnings, and errors.  Using ``-v`` or ``--verbose`` increases the detail
-    level to include individual file-level operations, link analysis messages,
-    and distutils messages from any setup scripts that get run.  If you include
-    the ``-v`` option more than once, the second and subsequent uses are passed
-    down to any setup scripts, increasing the verbosity of their reporting as
-    well.
-
-``--dry-run, -n`` (New in 0.4a4)
-    Don't actually install the package or scripts.  This option is passed down
-    to any setup scripts run, so packages should not actually build either.
-    This does *not* skip downloading, nor does it skip extracting source
-    distributions to a temporary/build directory.
-
-``--optimize=LEVEL``, ``-O LEVEL`` (New in 0.4a4)
-    If you are installing from a source distribution, and are *not* using the
-    ``--zip-ok`` option, this option controls the optimization level for
-    compiling installed ``.py`` files to ``.pyo`` files.  It does not affect
-    the compilation of modules contained in ``.egg`` files, only those in
-    ``.egg`` directories.  The optimization level can be set to 0, 1, or 2;
-    the default is 0 (unless it's set under ``install`` or ``install_lib`` in
-    one of your distutils configuration files).
-
-``--record=FILENAME``  (New in 0.5a4)
-    Write a record of all installed files to FILENAME.  This is basically the
-    same as the same option for the standard distutils "install" command, and
-    is included for compatibility with tools that expect to pass this option
-    to "setup.py install".
-
-``--site-dirs=DIRLIST, -S DIRLIST``   (New in 0.6a1)
-    Specify one or more custom "site" directories (separated by commas).
-    "Site" directories are directories where ``.pth`` files are processed, such
-    as the main Python ``site-packages`` directory.  As of 0.6a10, EasyInstall
-    automatically detects whether a given directory processes ``.pth`` files
-    (or can be made to do so), so you should not normally need to use this
-    option.  It is now only necessary if you want to override EasyInstall's
-    judgment and force an installation directory to be treated as if it
-    supported ``.pth`` files.
-
-``--no-deps, -N``  (New in 0.6a6)
-    Don't install any dependencies.  This is intended as a convenience for
-    tools that wrap eggs in a platform-specific packaging system.  (We don't
-    recommend that you use it for anything else.)
-
-``--allow-hosts=PATTERNS, -H PATTERNS``   (New in 0.6a6)
-    Restrict downloading and spidering to hosts matching the specified glob
-    patterns.  E.g. ``-H *.python.org`` restricts web access so that only
-    packages listed on, and downloadable from, machines in the ``python.org``
-    domain can be used.  The glob patterns must match the *entire*
-    user/host/port section of the target URL(s).  For example,
-    ``*.python.org`` will NOT accept a URL like ``http://python.org/foo`` or
-    ``http://www.python.org:8080/``.  Multiple patterns can be specified by
-    separating them with commas.  The default pattern is ``*``, which matches
-    anything.
-
-    In general, this option is mainly useful for blocking EasyInstall's web
-    access altogether (e.g. ``-Hlocalhost``), or to restrict it to an intranet
-    or other trusted site.  EasyInstall will do the best it can to satisfy
-    dependencies given your host restrictions, but of course can fail if it
-    can't find suitable packages.  EasyInstall displays all blocked URLs, so
-    that you can adjust your ``--allow-hosts`` setting if it is more strict
-    than you intended.  Some sites may wish to define a restrictive default
-    setting for this option in their `configuration files`_, and then manually
-    override the setting on the command line as needed.
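-
-    For example, a restrictive default in a configuration file might look
-    something like this (the hostnames are hypothetical):
-
-    .. code-block:: ini
-
-        [easy_install]
-        allow_hosts = *.example.com,localhost
-        find_links = http://packages.example.com/dist/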
-
-``--prefix=DIR`` (New in 0.6a10)
-    Use the specified directory as a base for computing the default
-    installation and script directories.  On Windows, the resulting default
-    directories will be ``prefix\\Lib\\site-packages`` and ``prefix\\Scripts``,
-    while on other platforms the defaults will be
-    ``prefix/lib/python2.X/site-packages`` (with the appropriate version
-    substituted) for libraries and ``prefix/bin`` for scripts.
-
-    Note that the ``--prefix`` option only sets the *default* installation and
-    script directories, and does not override the ones set on the command line
-    or in a configuration file.
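-
-    For example (the prefix path is purely illustrative)::
-
-        easy_install --prefix=/opt/mypython SomePackage
-
-    would use ``/opt/mypython/lib/python2.X/site-packages`` (with the
-    appropriate Python version substituted) as the default installation
-    directory and ``/opt/mypython/bin`` as the default script directory on
-    non-Windows platforms.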
-
-``--local-snapshots-ok, -l`` (New in 0.6c6)
-    Normally, EasyInstall prefers to only install *released* versions of
-    projects, not in-development ones, because such projects may not
-    have a currently-valid version number.  So, it usually only installs them
-    when their ``setup.py`` directory is explicitly passed on the command line.
-
-    However, if this option is used, then any in-development projects that were
-    installed using the ``setup.py develop`` command, will be used to build
-    eggs, effectively upgrading the "in-development" project to a snapshot
-    release.  Normally, this option is used only in conjunction with the
-    ``--always-copy`` option to create a distributable snapshot of every egg
-    needed to run an application.
-
-    Note that if you use this option, you must make sure that there is a valid
-    version number (such as an SVN revision number tag) for any in-development
-    projects that may be used, as otherwise EasyInstall may not be able to tell
-    what version of the project is "newer" when future installations or
-    upgrades are attempted.
-
-
-.. _non-root installation:
-
-Custom Installation Locations
------------------------------
-
-By default, EasyInstall installs Python packages into Python's main
-``site-packages`` directory, and manages them using a custom ``.pth`` file in
-that same directory.
-
-Very often though, a user or developer wants ``easy_install`` to install and
-manage Python packages in an alternative location, usually for one of three
-reasons:
-
-1. They don't have access to write to the main Python site-packages directory.
-
-2. They want a user-specific stash of packages that is not visible to other users.
-
-3. They want to isolate a set of packages to a specific Python application, usually to minimize
-   the possibility of version conflicts.
-
-Historically, there have been many approaches to achieve custom installation.
-The following section lists only the easiest and most relevant approaches [1]_.
-
-`Use the "--user" option`_
-
-`Use the "--user" option and customize "PYTHONUSERBASE"`_
-
-`Use "virtualenv"`_
-
-.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by `PEP-370`_ in Python 2.6.
-
-.. _PEP-370: http://www.python.org/dev/peps/pep-0370/
-
-
-Use the "--user" option
-~~~~~~~~~~~~~~~~~~~~~~~
-With Python 2.6 came the User scheme for installation, which means that all
-Python distributions support an alternative install location that is specific
-to a user [2]_ [3]_.  The default location for each OS is explained in the
-Python documentation for the ``site.USER_BASE`` variable.  This mode of
-installation can be turned on by specifying the ``--user`` option to
-``setup.py install`` or ``easy_install``.  This approach serves the need to
-have a user-specific stash of packages.
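-
-For example (the package name is purely illustrative)::
-
-    easy_install --user SomePackage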
-
-.. [2] Prior to Python 2.6, Mac OS X offered a form of the User scheme. That is now subsumed into the User scheme introduced in Python 2.6.
-.. [3] Prior to the User scheme, there was the Home scheme, which is still available, but requires more effort than the User scheme to get packages recognized.
-
-Use the "--user" option and customize "PYTHONUSERBASE"
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The User scheme install location can be customized by setting the
-``PYTHONUSERBASE`` environment variable, which updates the value of
-``site.USER_BASE``.  To isolate packages to a specific application, simply
-set ``PYTHONUSERBASE`` in that application's environment to a location that
-contains just those packages.
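-
-For example, a sketch of such an invocation (the path and package name are
-hypothetical)::
-
-    PYTHONUSERBASE=/opt/myapp easy_install --user SomePackage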
-
-Use "virtualenv"
-~~~~~~~~~~~~~~~~
-"virtualenv" is a 3rd-party Python package that effectively "clones" a Python
-installation, thereby creating an isolated location to install packages.  The
-evolution of "virtualenv" started before the existence of the User
-installation scheme.  "virtualenv" provides a version of ``easy_install``
-that is scoped to the cloned Python install and is used in the normal way.
-"virtualenv" does offer various features that the User installation scheme
-alone does not provide, e.g. the ability to hide the main Python
-site-packages.
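-
-A minimal sketch of that workflow (the directory and package names are
-arbitrary)::
-
-    virtualenv ./myenv
-    ./myenv/bin/easy_install SomePackage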
-
-Please refer to the `virtualenv`_ documentation for more details.
-
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
-
-
-
-Package Index "API"
--------------------
-
-Custom package indexes (and PyPI) must observe the following rules for
-EasyInstall to be able to look up and download packages:
-
-1. Except where stated otherwise, "pages" are HTML or XHTML, and "links"
-   refer to ``href`` attributes.
-
-2. Individual project version pages' URLs must be of the form
-   ``base/projectname/version``, where ``base`` is the package index's base URL.
-
-3. Omitting the ``/version`` part of a project page's URL (but keeping the
-   trailing ``/``) should result in a page that is either:
-
-   a) The single active version of that project, as though the version had been
-      explicitly included, OR
-
-   b) A page with links to all of the active version pages for that project.
-
-4. Individual project version pages should contain direct links to downloadable
-   distributions where possible.  It is explicitly permitted for a project's
-   "long_description" to include URLs, and these should be formatted as HTML
-   links by the package index, as EasyInstall does no special processing to
-   identify what parts of a page are index-specific and which are part of the
-   project's supplied description.
-
-5. Where available, MD5 information should be added to download URLs by
-   appending a fragment identifier of the form ``#md5=...``, where ``...`` is
-   the 32-character hex MD5 digest.  EasyInstall will verify that the
-   downloaded file's MD5 digest matches the given value.
-
-6. Individual project version pages should identify any "homepage" or
-   "download" URLs using ``rel="homepage"`` and ``rel="download"`` attributes
-   on the HTML elements linking to those URLs. Use of these attributes will
-   cause EasyInstall to always follow the provided links, unless it can be
-   determined by inspection that they are downloadable distributions. If the
-   links are not to downloadable distributions, they are retrieved, and if they
-   are HTML, they are scanned for download links. They are *not* scanned for
-   additional "homepage" or "download" links, as these are only processed for
-   pages that are part of a package index site.
-
-7. The root URL of the index, if retrieved with a trailing ``/``, must result
-   in a page containing links to *all* projects' active version pages.
-
-   (Note: This requirement is a workaround for the absence of case-insensitive
-   ``safe_name()`` matching of project names in URL paths. If project names are
-   matched in this fashion (e.g. via the PyPI server, mod_rewrite, or a similar
-   mechanism), then it is not necessary to include this all-packages listing
-   page.)
-
-8. If a package index is accessed via a ``file://`` URL, then EasyInstall will
-   automatically use ``index.html`` files, if present, when trying to read a
-   directory with a trailing ``/`` on the URL.
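-
-As a concrete sketch, a minimal project version page satisfying rules 4, 5,
-and 6 might look something like this (all names, URLs, and the digest are
-placeholders)::
-
-    <html><head><title>SomePackage 1.0</title></head>
-    <body>
-      <a rel="homepage" href="http://somepackage.example.com/">Home Page</a>
-      <a href="SomePackage-1.0.tar.gz#md5=0123456789abcdef0123456789abcdef">
-          SomePackage-1.0.tar.gz</a>
-    </body></html>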
-
-
-Backward Compatibility
-~~~~~~~~~~~~~~~~~~~~~~
-
-Package indexes that wish to support setuptools versions prior to 0.6b4 should
-also follow these rules:
-
-* Homepage and download links must be preceded with ``"<th>Home Page"`` or
-  ``"<th>Download URL"``, in addition to (or instead of) the ``rel=""``
-  attributes on the actual links.  These marker strings do not need to be
-  visible, or uncommented, however!  For example, the following is a valid
-  homepage link that will work with any version of setuptools::
-
-    <li>
-     <strong>Home Page:</strong>
-     <!-- <th>Home Page -->
-     <a rel="homepage" href="http://sqlobject.org">http://sqlobject.org</a>
-    </li>
-
-  Even though the marker string is in an HTML comment, older versions of
-  EasyInstall will still "see" it and know that the link that follows is the
-  project's home page URL.
-
-* The pages described by paragraph 3(b) of the preceding section *must*
-  contain the string ``"Index of Packages</title>"`` somewhere in their text.
-  This can be inside of an HTML comment, if desired, and it can be anywhere
-  in the page.  (Note: this string MUST NOT appear on normal project pages, as
-  described in paragraphs 2 and 3(a)!)
-
-In addition, for compatibility with PyPI versions that do not use ``#md5=``
-fragment IDs, EasyInstall uses the following regular expression to match PyPI's
-displayed MD5 info (broken onto two lines for readability)::
-
-    <a href="([^"#]+)">([^<]+)</a>\n\s+\(<a href="[^?]+\?:action=show_md5
-    &amp;digest=([0-9a-f]{32})">md5</a>\)
-
-History
-=======
-
-0.6c9
- * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
-   is flattened out in the resulting egg.  (There was a case-sensitivity
-   problem that affected some distributions, notably ``pywin32``.)
-
- * Prevent ``--help-commands`` and other junk from showing under Python 2.5
-   when running ``easy_install --help``.
-
- * Fixed GUI scripts sometimes not executing on Windows
-
- * Fixed not picking up dependency links from recursive dependencies.
-
- * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
-
- * Changes for Jython compatibility
-
- * Improved error message when a requirement is also a directory name, but the
-   specified directory is not a source package.
-
- * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
-
- * Fixed HTTP SVN detection failing when the page title included a project
-   name (e.g. on SourceForge-hosted SVN)
-
- * Fix Jython script installation to handle ``#!`` lines better when
-   ``sys.executable`` is a script.
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Keep site directories (e.g. ``site-packages``) from being included in
-   ``.pth`` files.
-
-0.6c7
- * ``ftp:`` download URLs now work correctly.
-
- * The default ``--index-url`` is now ``http://pypi.python.org/simple``, to use
-   the Python Package Index's new simpler (and faster!) REST API.
-
-0.6c6
- * EasyInstall no longer aborts the installation process if a URL it wants to
-   retrieve can't be downloaded, unless the URL is an actual package download.
-   Instead, it issues a warning and tries to keep going.
-
- * Fixed distutils-style scripts originally built on Windows having their line
-   endings doubled when installed on any platform.
-
- * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
-   installed using ``setup.py develop``.
-
- * Fixed not HTML-decoding URLs scraped from web pages
-
-0.6c5
- * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
-   is installed unzipped.
-
-0.6c4
- * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
-   URLs.  If a password-protected page contains links to the same host (and
-   protocol), those links will inherit the credentials used to access the
-   original page.
-
- * Removed all special support for Sourceforge mirrors, as Sourceforge's
-   mirror system now works well for non-browser downloads.
-
- * Fixed not recognizing ``win32.exe`` installers that included a custom
-   bitmap.
-
- * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
-   are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
-   is done by ``os.urandom()`` on some platforms).
-
- * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
-   has a space in it (e.g., the user installed Python to a ``Program Files``
-   directory).
-
-0.6c3
- * You can once again use "python -m easy_install" with Python 2.4 and above.
-
- * Python 2.5 compatibility fixes added.
-
-0.6c2
- * Windows script wrappers now support quoted arguments and arguments
-   containing spaces.  (Patch contributed by Jim Fulton.)
-
- * The ``ez_setup.py`` script now actually works when you put a setuptools
-   ``.egg`` alongside it for bootstrapping an offline machine.
-
- * A writable installation directory on ``sys.path`` is no longer required to
-   download and extract a source distribution using ``--editable``.
-
- * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable``
-   contains non-ASCII characters, to prevent deprecation warnings about an
-   unspecified encoding when the script is run.
-
-0.6c1
- * EasyInstall now includes setuptools version information in the
-   ``User-Agent`` string sent to websites it visits.
-
-0.6b4
- * Fix creating Python wrappers for non-Python scripts
-
- * Fix ``ftp://`` directory listing URLs from causing a crash when used in the
-   "Home page" or "Download URL" slots on PyPI.
-
- * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile
-   or directory is deleted/overwritten.
-
- * Fix not recognizing HTML 404 pages from package indexes.
-
- * Allow ``file://`` URLs to be used as a package index.  URLs that refer to
-   directories will use an internally-generated directory listing if there is
-   no ``index.html`` file in the directory.
-
- * Allow external links in a package index to be specified using
-   ``rel="homepage"`` or ``rel="download"``, without needing the old
-   PyPI-specific visible markup.
-
- * Suppressed warning message about possibly-misspelled project name, if an egg
-   or link for that project name has already been seen.
-
-0.6b3
- * Fix local ``--find-links`` eggs not being copied except with
-   ``--always-copy``.
-
- * Fix sometimes not detecting local packages installed outside of "site"
-   directories.
-
- * Fix mysterious errors during initial ``setuptools`` install, caused by
-   ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru
-   after deleting the egg from which it's running.
-
-0.6b2
- * Don't install or update a ``site.py`` patch when installing to a
-   ``PYTHONPATH`` directory with ``--multi-version``, unless an
-   ``easy-install.pth`` file is already in use there.
-
- * Construct ``.pth`` file paths in such a way that installing an egg whose
-   name begins with ``import`` doesn't cause a syntax error.
-
- * Fixed a bogus warning message that wasn't updated since the 0.5 versions.
-
-0.6b1
- * Better ambiguity management: accept ``#egg`` name/version even if processing
-   what appears to be a correctly-named distutils file, and ignore ``.egg``
-   files with no ``-``, since valid Python ``.egg`` files always have a version
-   number (but Scheme eggs often don't).
-
- * Support ``file://`` links to directories in ``--find-links``, so that
-   easy_install can build packages from local source checkouts.
-
- * Added automatic retry for Sourceforge mirrors.  The new download process is
-   to first just try dl.sourceforge.net, then randomly select mirror IPs and
-   remove ones that fail, until something works.  The removed IPs stay removed
-   for the remainder of the run.
-
- * Ignore bdist_dumb distributions when looking at download URLs.
-
-0.6a11
- * Process ``dependency_links.txt`` if found in a distribution, by adding the
-   URLs to the list for scanning.
-
- * Use relative paths in ``.pth`` files when eggs are being installed to the
-   same directory as the ``.pth`` file.  This maximizes portability of the
-   target directory when building applications that contain eggs.
-
- * Added ``easy_install-N.N`` script(s) for convenience when using multiple
-   Python versions.
-
- * Added automatic handling of installation conflicts.  Eggs are now shifted to
-   the front of sys.path, in an order consistent with where they came from,
-   making EasyInstall seamlessly co-operate with system package managers.
-
-   The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options
-   are now no longer necessary, and will generate warnings at the end of a
-   run if you use them.
-
- * Don't recursively traverse subdirectories given to ``--find-links``.
-
-0.6a10
- * Added exhaustive testing of the install directory, including a spawn test
-   for ``.pth`` file support, and directory writability/existence checks.  This
-   should virtually eliminate the need to set or configure ``--site-dirs``.
-
- * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of
-   RTFM-ing.  :)
-
- * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it
-   manually to make it work.  ``--multi-version`` is no longer a silent
-   default; you must explicitly use it if installing to a non-PYTHONPATH,
-   non-"site" directory.
-
- * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``,
-   ``--install-dir``, and ``--script-dir`` options, whether on the command line
-   or in configuration files.
-
- * Improved SourceForge mirror processing to work faster and be less affected
-   by transient HTML changes made by SourceForge.
-
- * PyPI searches now use the exact spelling of requirements specified on the
-   command line or in a project's ``install_requires``.  Previously, a
-   normalized form of the name was used, which could lead to unnecessary
-   full-index searches when a project's name had an underscore (``_``) in it.
-
- * EasyInstall can now download bare ``.py`` files and wrap them in an egg,
-   as long as you include an ``#egg=name-version`` suffix on the URL, or if
-   the ``.py`` file is listed as the "Download URL" on the project's PyPI page.
-   This allows third parties to "package" trivial Python modules just by
-   linking to them (e.g. from within their own PyPI page or download links
-   page).
-
- * The ``--always-copy`` option now skips "system" and "development" eggs since
-   they can't be reliably copied.  Note that this may cause EasyInstall to
-   choose an older version of a package than what you expected, or it may cause
-   downloading and installation of a fresh version of what's already installed.
-
- * The ``--find-links`` option previously scanned all supplied URLs and
-   directories as early as possible, but now only directories and direct
-   archive links are scanned immediately.  URLs are not retrieved unless a
-   package search was already going to go online due to a package not being
-   available locally, or due to the use of the ``--upgrade`` or ``-U`` option.
-
- * Fixed the annoying ``--help-commands`` wart.
-
-0.6a9
- * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside
-   "baskets") when they weren't explicitly listed in the ``.pth`` file.
-
- * If more than one URL appears to describe the exact same distribution, prefer
-   the shortest one.  This helps to avoid "table of contents" CGI URLs like the
-   ones on effbot.org.
-
- * Quote arguments to python.exe (including python's path) to avoid problems
-   when Python (or a script) is installed in a directory whose name contains
-   spaces on Windows.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
-   format.  Running ``bdist_wininst`` on a setuptools-based package wraps the
-   egg in an .exe that will safely install it as an egg (i.e., with metadata
-   and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
-   back into an ``.egg`` file or directory and install it as such.
-
-0.6a8
- * Update for changed SourceForge mirror format
-
- * Fixed not installing dependencies for some packages fetched via Subversion
-
- * Fixed dependency installation with ``--always-copy`` not using the same
-   dependency resolution procedure as other operations.
-
- * Fixed not fully removing temporary directories on Windows, if a Subversion
-   checkout left read-only files behind
-
- * Fixed some problems building extensions when Pyrex was installed, especially
-   with Python 2.4 and/or packages using SWIG.
-
-0.6a7
- * Fixed not being able to install Windows script wrappers using Python 2.3
-
-0.6a6
- * Added support for "traditional" PYTHONPATH-based non-root installation, and
-   also the convenient ``virtual-python.py`` script, based on a contribution
-   by Ian Bicking.  The setuptools egg now contains a hacked ``site`` module
-   that makes the PYTHONPATH-based approach work with .pth files, so that you
-   can get the full EasyInstall feature set on such installations.
-
- * Added ``--no-deps`` and ``--allow-hosts`` options.
-
- * Improved Windows ``.exe`` script wrappers so that the script can have the
-   same name as a module without confusing Python.
-
- * Changed dependency processing so that it's breadth-first, allowing a
-   depender's preferences to override those of a dependee, to prevent conflicts
-   when a lower version is acceptable to the dependee, but not the depender.
-   Also, ensure that currently installed/selected packages aren't given
-   precedence over ones desired by a package being installed, which could
-   cause conflict errors.
-
-0.6a3
- * Improved error message when trying to use old ways of running
-   ``easy_install``.  Removed the ability to run via ``python -m`` or by
-   running ``easy_install.py``; ``easy_install`` is the command to run on all
-   supported platforms.
-
- * Improved wrapper script generation and runtime initialization so that a
-   VersionConflict doesn't occur if you later install a competing version of a
-   needed package as the default version of that package.
-
- * Fixed a problem parsing version numbers in ``#egg=`` links.
-
-0.6a2
- * EasyInstall can now install "console_scripts" defined by packages that use
-   ``setuptools`` and define appropriate entry points.  On Windows, console
-   scripts get an ``.exe`` wrapper so you can just type their name.  On other
-   platforms, the scripts are installed without a file extension.
-
- * Using ``python -m easy_install`` or running ``easy_install.py`` is now
-   DEPRECATED, since an ``easy_install`` wrapper is now available on all
-   platforms.
-
-0.6a1
- * EasyInstall now does MD5 validation of downloads from PyPI, or from any link
-   that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest.
-
- * EasyInstall now handles symlinks in target directories by removing the link,
-   rather than attempting to overwrite the link's destination.  This makes it
-   easier to set up an alternate Python "home" directory (as described above in
-   the `Non-Root Installation`_ section).
-
- * Added support for handling MacOS platform information in ``.egg`` filenames,
-   based on a contribution by Kevin Dangoor.  You may wish to delete and
-   reinstall any eggs whose filename includes "darwin" and "Power_Macintosh",
-   because the format for this platform information has changed so that minor
-   OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a
-   previous OS version to become obsolete.
-
- * easy_install's dependency processing algorithms have changed.  When using
-   ``--always-copy``, it now ensures that dependencies are copied too.  When
-   not using ``--always-copy``, it tries to use a single resolution loop,
-   rather than recursing.
-
- * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py``
-   extensions.
-
- * Added ``--site-dirs`` option to allow adding custom "site" directories.
-   Made ``easy-install.pth`` work in platform-specific alternate site
-   directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X).
-
- * If you manually delete the current version of a package, the next run of
-   EasyInstall against the target directory will now remove the stray entry
-   from the ``easy-install.pth`` file.
-
- * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID
-   as pointing to the named project's source checkout.  Such URLs have a lower
-   match precedence than any other kind of distribution, so they'll only be
-   used if they have a higher version number than any other available
-   distribution, or if you use the ``--editable`` option.  The ``#egg``
-   fragment can contain a version if it's formatted as ``#egg=proj-ver``,
-   where ``proj`` is the project name, and ``ver`` is the version number.  You
-   *must* use the format for these values that the ``bdist_egg`` command uses;
-   i.e., all non-alphanumeric runs must be condensed to single underscore
-   characters.
-
- * Added the ``--editable`` option; see `Editing and Viewing Source Packages`_
-   above for more info.  Also, slightly changed the behavior of the
-   ``--build-directory`` option.
-
- * Fixed the setup script sandbox facility not recognizing certain paths as
-   valid on case-insensitive platforms.
-
-0.5a12
- * Fix ``python -m easy_install`` not working due to setuptools being installed
-   as a zipfile.  Update safety scanner to check for modules that might be used
-   as ``python -m`` scripts.
-
- * Misc. fixes for win32.exe support, including changes to support Python 2.4's
-   changed ``bdist_wininst`` format.
-
-0.5a10
- * Put the ``easy_install`` module back in as a module, as it's needed for
-   ``python -m`` to run it!
-
- * Allow ``--find-links/-f`` to accept local directories or filenames as well
-   as URLs.
-
-0.5a9
- * EasyInstall now automatically detects when an "unmanaged" package or
-   module is going to be on ``sys.path`` ahead of a package you're installing,
-   thereby preventing the newer version from being imported.  By default, it
-   will abort installation to alert you of the problem, but there are also
-   new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``)
-   available to change the default behavior.  (Note: this new feature doesn't
-   take effect for egg files that were built with older ``setuptools``
-   versions, because they lack the new metadata file required to implement it.)
-
- * The ``easy_install`` distutils command now uses ``DistutilsError`` as its
-   base error type for errors that should just issue a message to stderr and
-   exit the program without a traceback.
-
- * EasyInstall can now be given a path to a directory containing a setup
-   script, and it will attempt to build and install the package there.
-
- * EasyInstall now performs a safety analysis on module contents to determine
-   whether a package is likely to run in zipped form, and displays
-   information about what modules may be doing introspection that would break
-   when running as a zipfile.
-
- * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that
-   would ordinarily be considered safe to unzip, and changed the meaning of
-   ``--zip-ok/-z`` to "always leave everything zipped".
-
-0.5a8
- * There is now a separate documentation page for `setuptools`_; revision
-   history that's not specific to EasyInstall has been moved to that page.
-
- .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
-
-0.5a5
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
-   the ``easy_install`` module to ``setuptools.command.easy_install``.  Note
-   that if you were importing or extending it, you must now change your imports
-   accordingly.  ``easy_install.py`` is still installed as a script, but not as
-   a module.
-
-0.5a4
- * Added ``--always-copy/-a`` option to always copy needed packages to the
-   installation directory, even if they're already present elsewhere on
-   sys.path. (In previous versions, this was the default behavior, but now
-   you must request it.)
-
- * Added ``--upgrade/-U`` option to force checking PyPI for latest available
-   version(s) of all packages requested by name and version, even if a matching
-   version is available locally.
-
- * Added automatic installation of dependencies declared by a distribution
-   being installed.  These dependencies must be listed in the distribution's
-   ``EGG-INFO`` directory, so the distribution has to have declared its
-   dependencies by using setuptools.  If a package has requirements it didn't
-   declare, you'll still have to deal with them yourself.  (E.g., by asking
-   EasyInstall to find and install them.)
-
- * Added the ``--record`` option to ``easy_install`` for the benefit of tools
-   that run ``setup.py install --record=filename`` on behalf of another
-   packaging system.
-
-0.5a3
- * Fixed not setting script permissions to allow execution.
-
- * Improved sandboxing so that setup scripts that want a temporary directory
-   (e.g. pychecker) can still run in the sandbox.
-
-0.5a2
- * Fix stupid stupid refactoring-at-the-last-minute typos.  :(
-
-0.5a1
- * Added support for converting ``.win32.exe`` installers to eggs on the fly.
-   EasyInstall will now recognize such files by name and install them.
-
- * Fixed a problem with picking the "best" version to install (versions were
-   being sorted as strings, rather than as parsed values)
-
-0.4a4
- * Added support for the distutils "verbose/quiet" and "dry-run" options, as
-   well as the "optimize" flag.
-
- * Support downloading packages that were uploaded to PyPI (by scanning all
-   links on package pages, not just the homepage/download links).
-
-0.4a3
- * Add progress messages to the search/download process so that you can tell
-   what URLs it's reading to find download links.  (Hopefully, this will help
-   people report out-of-date and broken links to package authors, and to tell
-   when they've asked for a package that doesn't exist.)
-
-0.4a2
- * Added support for installing scripts
-
- * Added support for setting options via distutils configuration files, and
-   using distutils' default options as a basis for EasyInstall's defaults.
-
- * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the
-   script installation directory option.
-
- * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if
-   Python includes SSL support.
-
-0.4a1
- * Added ``--scan-url`` and ``--index-url`` options, to scan download pages
-   and search PyPI for needed packages.
-
-0.3a4
- * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single
-   URL installs, to avoid running the wrong setup.py.
-
-0.3a3
- * Added ``--build-directory=DIR/-b DIR`` option.
-
- * Added "installation report" that explains how to use 'require()' when doing
-   a multiversion install or alternate installation directory.
-
- * Added SourceForge mirror auto-select (Contributed by Ian Bicking)
-
- * Added "sandboxing" that stops a setup script from running if it attempts to
-   write to the filesystem outside of the build area
-
- * Added more workarounds for packages with quirky ``install_data`` hacks
-
-0.3a2
- * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as
-   automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking)
-
- * Misc. bug fixes
-
-0.3a1
- * Initial release.
-
-
-Future Plans
-============
-
-* Additional utilities to list/remove/verify packages
-* Signature checking?  SSL?  Ability to suppress PyPI search?
-* Display byte progress meter when downloading distributions and long pages?
-* Redirect stdout/stderr to log during run_setup?
-
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/index.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/index.txt
deleted file mode 100644
index 5f3b945b200a8a0504d65a1aaf28892d0243a037..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/index.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-Welcome to Distribute's documentation!
-======================================
-
-`Distribute` is a fork of the `Setuptools` project.
-
-Distribute is intended to replace Setuptools as the standard method for
-working with Python module distributions.
-
-For those who may wonder why they should switch to Distribute over Setuptools, it’s quite simple:
-
-- Distribute is a drop-in replacement for Setuptools
-- The code is actively maintained, and has over 10 committers
-- Distribute offers Python 3 support!
-
-Documentation content:
-
-.. toctree::
-   :maxdepth: 2
-
-   roadmap
-   python3
-   using
-   setuptools
-   easy_install
-   pkg_resources
-
-
-.. image:: http://python-distribute.org/pip_distribute.png
-
-Design done by Idan Gazit (http://pixane.com) - License: cc-by-3.0
-
-Copy & paste::
-
- curl -O http://python-distribute.org/distribute_setup.py
- python distribute_setup.py
- easy_install pip
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/pkg_resources.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/pkg_resources.txt
deleted file mode 100644
index 480f9547ceb1fdf60c55e2e12a393a49ca84207f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/pkg_resources.txt
+++ /dev/null
@@ -1,1955 +0,0 @@
-=============================================================
-Package Discovery and Resource Access using ``pkg_resources``
-=============================================================
-
-The ``pkg_resources`` module distributed with ``setuptools`` provides an API
-for Python libraries to access their resource files, and for extensible
-applications and frameworks to automatically discover plugins.  It also
-provides runtime support for using C extensions that are inside zipfile-format
-eggs, support for merging packages that have separately-distributed modules or
-subpackages, and APIs for managing Python's current "working set" of active
-packages.
-
-
-.. contents:: **Table of Contents**
-
-
---------
-Overview
---------
-
-Eggs are a distribution format for Python modules, similar in concept to Java's
-"jars" or Ruby's "gems".  They differ from previous Python distribution formats
-in that they are importable (i.e. they can be added to ``sys.path``), and they
-are *discoverable*, meaning that they carry metadata that unambiguously
-identifies their contents and dependencies, and thus can be *automatically*
-found and added to ``sys.path`` in response to simple requests of the form,
-"get me everything I need to use docutils' PDF support".
-
-The ``pkg_resources`` module provides runtime facilities for finding,
-introspecting, activating and using eggs and other "pluggable" distribution
-formats.  Because these are new concepts in Python (and not that
-well-established in other languages either), it helps to have a few special
-terms for talking about eggs and how they can be used:
-
-project
-    A library, framework, script, plugin, application, or collection of data
-    or other resources, or some combination thereof.  Projects are assumed to
-    have "relatively unique" names, e.g. names registered with PyPI.
-
-release
-    A snapshot of a project at a particular point in time, denoted by a version
-    identifier.
-
-distribution
-    A file or files that represent a particular release.
-
-importable distribution
-    A file or directory that, if placed on ``sys.path``, allows Python to
-    import any modules contained within it.
-
-pluggable distribution
-    An importable distribution whose filename unambiguously identifies its
-    release (i.e. project and version), and whose contents unambiguously
-    specify what releases of other projects will satisfy its runtime
-    requirements.
-
-extra
-    An "extra" is an optional feature of a release, that may impose additional
-    runtime requirements.  For example, if docutils PDF support required a
-    PDF support library to be present, docutils could define its PDF support as
-    an "extra", and list what other project releases need to be available in
-    order to provide it.
-
-environment
-    A collection of distributions potentially available for importing, but not
-    necessarily active.  More than one distribution (i.e. release version) for
-    a given project may be present in an environment.
-
-working set
-    A collection of distributions actually available for importing, as on
-    ``sys.path``.  At most one distribution (release version) of a given
-    project may be present in a working set, as otherwise there would be
-    ambiguity as to what to import.
-
-eggs
-    Eggs are pluggable distributions in one of the three formats currently
-    supported by ``pkg_resources``.  There are built eggs, development eggs,
-    and egg links.  Built eggs are directories or zipfiles whose name ends
-    with ``.egg`` and follows the egg naming conventions, and contain an
-    ``EGG-INFO`` subdirectory (zipped or otherwise).  Development eggs are
-    normal directories of Python code with one or more ``ProjectName.egg-info``
-    subdirectories.  And egg links are ``*.egg-link`` files that contain the
-    name of a built or development egg, to support symbolic linking on
-    platforms that do not have native symbolic links.
-
-(For more information about these terms and concepts, see also this
-`architectural overview`_ of ``pkg_resources`` and Python Eggs in general.)
-
-.. _architectural overview: http://mail.python.org/pipermail/distutils-sig/2005-June/004652.html
-
-
-.. -----------------
-.. Developer's Guide
-.. -----------------
-
-.. This section isn't written yet.  Currently planned topics include
-    Accessing Resources
-    Finding and Activating Package Distributions
-        get_provider()
-        require()
-        WorkingSet
-        iter_distributions
-    Running Scripts
-    Configuration
-    Namespace Packages
-    Extensible Applications and Frameworks
-        Locating entry points
-        Activation listeners
-        Metadata access
-        Extended Discovery and Installation
-    Supporting Custom PEP 302 Implementations
-.. For now, please check out the extensive `API Reference`_ below.
-
-
--------------
-API Reference
--------------
-
-Namespace Package Support
-=========================
-
-A namespace package is a package that only contains other packages and modules,
-with no direct contents of its own.  Such packages can be split across
-multiple, separately-packaged distributions.  Normally, you do not need to use
-the namespace package APIs directly; instead you should supply the
-``namespace_packages`` argument to ``setup()`` in your project's ``setup.py``.
-See the `setuptools documentation on namespace packages`_ for more information.
-
-However, if for some reason you need to manipulate namespace packages or
-directly alter ``sys.path`` at runtime, you may find these APIs useful:
-
-``declare_namespace(name)``
-    Declare that the dotted package name `name` is a "namespace package" whose
-    contained packages and modules may be spread across multiple distributions.
-    The named package's ``__path__`` will be extended to include the
-    corresponding package in all distributions on ``sys.path`` that contain a
-    package of that name.  (More precisely, if an importer's
-    ``find_module(name)`` returns a loader, then it will also be searched for
-    the package's contents.)  Whenever a Distribution's ``activate()`` method
-    is invoked, it checks for the presence of namespace packages and updates
-    their ``__path__`` contents accordingly.
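-
-    For example, the ``__init__.py`` of each portion of a namespace package
-    conventionally contains nothing but a declaration along these lines (a
-    minimal sketch of the usual pattern)::
-
-        # __init__.py of a namespace package portion: declare it, nothing else
-        from pkg_resources import declare_namespace
-        declare_namespace(__name__)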
-
-Applications that manipulate namespace packages or directly alter ``sys.path``
-at runtime may also need to use this API function:
-
-``fixup_namespace_packages(path_item)``
-    Declare that `path_item` is a newly added item on ``sys.path`` that may
-    need to be used to update existing namespace packages.  Ordinarily, this is
-    called for you when an egg is automatically added to ``sys.path``, but if
-    your application modifies ``sys.path`` to include locations that may
-    contain portions of a namespace package, you will need to call this
-    function to ensure they are added to the existing namespace packages.
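-
-    For example, if an application appends a plugin directory to ``sys.path``
-    by hand (the directory name here is hypothetical)::
-
-        import sys
-        import pkg_resources
-
-        sys.path.append('/opt/myapp/plugins')
-        pkg_resources.fixup_namespace_packages('/opt/myapp/plugins')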
-
-Although by default ``pkg_resources`` only supports namespace packages for
-filesystem and zip importers, you can extend its support to other "importers"
-compatible with PEP 302 using the ``register_namespace_handler()`` function.
-See the section below on `Supporting Custom Importers`_ for details.
-
-.. _setuptools documentation on namespace packages: http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
-
-
-``WorkingSet`` Objects
-======================
-
-The ``WorkingSet`` class provides access to a collection of "active"
-distributions.  In general, there is only one meaningful ``WorkingSet``
-instance: the one that represents the distributions that are currently active
-on ``sys.path``.  This global instance is available under the name
-``working_set`` in the ``pkg_resources`` module.  However, specialized
-tools may wish to manipulate working sets that don't correspond to
-``sys.path``, and therefore may wish to create other ``WorkingSet`` instances.
-
-It's important to note that the global ``working_set`` object is initialized
-from ``sys.path`` when ``pkg_resources`` is first imported, but is only updated
-if you do all future ``sys.path`` manipulation via ``pkg_resources`` APIs.  If
-you manually modify ``sys.path``, you must invoke the appropriate methods on
-the ``working_set`` instance to keep it in sync.  Unfortunately, Python does
-not provide any way to detect arbitrary changes to a list object like
-``sys.path``, so ``pkg_resources`` cannot automatically update the
-``working_set`` based on changes to ``sys.path``.
-
-``WorkingSet(entries=None)``
-    Create a ``WorkingSet`` from an iterable of path entries.  If `entries`
-    is not supplied, it defaults to the value of ``sys.path`` at the time
-    the constructor is called.
-
-    Note that you will not normally construct ``WorkingSet`` instances
-    yourself, but instead you will implicitly or explicitly use the global
-    ``working_set`` instance.  For the most part, the ``pkg_resources`` API
-    is designed so that the ``working_set`` is used by default, such that you
-    don't have to explicitly refer to it most of the time.
-
-
-Basic ``WorkingSet`` Methods
-----------------------------
-
-The following methods of ``WorkingSet`` objects are also available as
-module-level functions in ``pkg_resources`` that apply to the default
-``working_set`` instance.  Thus, you can use e.g. ``pkg_resources.require()``
-as an abbreviation for ``pkg_resources.working_set.require()``:
-
-
-``require(*requirements)``
-    Ensure that distributions matching `requirements` are activated
-
-    `requirements` must be a string or a (possibly-nested) sequence
-    thereof, specifying the distributions and versions required.  The
-    return value is a sequence of the distributions that needed to be
-    activated to fulfill the requirements; all relevant distributions are
-    included, even if they were already activated in this working set.
-
-    For the syntax of requirement specifiers, see the section below on
-    `Requirements Parsing`_.
-
-    In general, it should not be necessary for you to call this method
-    directly.  It's intended more for use in quick-and-dirty scripting and
-    interactive interpreter hacking than for production use. If you're creating
-    an actual library or application, it's strongly recommended that you create
-    a "setup.py" script using ``setuptools``, and declare all your requirements
-    there.  That way, tools like EasyInstall can automatically detect what
-    requirements your package has, and deal with them accordingly.
-
-    Note that calling ``require('SomePackage')`` will not install
-    ``SomePackage`` if it isn't already present.  If you need to do this, you
-    should use the ``resolve()`` method instead, which allows you to pass an
-    ``installer`` callback that will be invoked when a needed distribution
-    can't be found on the local machine.  You can then have this callback
-    display a dialog, automatically download the needed distribution, or
-    whatever else is appropriate for your application. See the documentation
-    below on the ``resolve()`` method for more information, and also on the
-    ``obtain()`` method of ``Environment`` objects.
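-
-    A quick interactive-style sketch, using the hypothetical "Report-O-Rama"
-    project described under `Requirements Parsing`_ below::
-
-        import pkg_resources
-
-        # activate Report-O-Rama plus whatever its PDF extra needs
-        activated = pkg_resources.require("Report-O-Rama[PDF]")
-        for dist in activated:
-            print dist.project_name, dist.version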
-
-``run_script(requires, script_name)``
-    Locate distribution specified by `requires` and run its `script_name`
-    script.  `requires` must be a string containing a requirement specifier.
-    (See `Requirements Parsing`_ below for the syntax.)
-
-    The script, if found, will be executed in *the caller's globals*.  That's
-    because this method is intended to be called from wrapper scripts that
-    act as a proxy for the "real" scripts in a distribution.  A wrapper script
-    usually doesn't need to do anything but invoke this function with the
-    correct arguments.
-
-    If you need more control over the script execution environment, you
-    probably want to use the ``run_script()`` method of a ``Distribution``
-    object's `Metadata API`_ instead.
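-
-    A sketch of such a wrapper script (the project and script names are
-    hypothetical)::
-
-        #!/usr/bin/env python
-        # thin wrapper that runs the "real" script installed with FooProject
-        from pkg_resources import run_script
-        run_script("FooProject>=1.2", "foo-admin")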
-
-``iter_entry_points(group, name=None)``
-    Yield entry point objects from `group` matching `name`
-
-    If `name` is None, yields all entry points in `group` from all
-    distributions in the working set, otherwise only ones matching both
-    `group` and `name` are yielded.  Entry points are yielded from the active
-    distributions in the order that the distributions appear in the working
-    set.  (For the global ``working_set``, this should be the same as the order
-    that they are listed in ``sys.path``.)  Note that within the entry points
-    advertised by an individual distribution, there is no particular ordering.
-
-    Please see the section below on `Entry Points`_ for more information.
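-
-    For example, a framework could discover and use every plugin advertised in
-    a (hypothetical) ``myframework.plugins`` group like this::
-
-        import pkg_resources
-
-        for ep in pkg_resources.iter_entry_points("myframework.plugins"):
-            plugin = ep.load()    # import and return the advertised object
-            plugin()              # use it however the framework sees fit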
-
-
-``WorkingSet`` Methods and Attributes
--------------------------------------
-
-These methods are used to query or manipulate the contents of a specific
-working set, so they must be explicitly invoked on a particular ``WorkingSet``
-instance:
-
-``add_entry(entry)``
-    Add a path item to the ``entries``, finding any distributions on it.  You
-    should use this when you add additional items to ``sys.path`` and you want
-    the global ``working_set`` to reflect the change.  This method is also
-    called by the ``WorkingSet()`` constructor during initialization.
-
-    This method uses ``find_distributions(entry, True)`` to find distributions
-    corresponding to the path entry, and then ``add()`` them.  Note, however,
-    that `entry` is always appended to the ``entries`` attribute, even if it is
-    already present.  (This is because ``sys.path`` can contain the same value
-    more than once, and the ``entries`` attribute should be able to reflect
-    this.)
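-
-    For example (the directory name is hypothetical)::
-
-        import sys
-        import pkg_resources
-
-        sys.path.append('/opt/myapp/eggs')
-        pkg_resources.working_set.add_entry('/opt/myapp/eggs')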
-
-``__contains__(dist)``
-    True if `dist` is active in this ``WorkingSet``.  Note that only one
-    distribution for a given project can be active in a given ``WorkingSet``.
-
-``__iter__()``
-    Yield distributions for non-duplicate projects in the working set.
-    The yield order is the order in which the items' path entries were
-    added to the working set.
-
-``find(req)``
-    Find a distribution matching `req` (a ``Requirement`` instance).
-    If there is an active distribution for the requested project, this
-    returns it, as long as it meets the version requirement specified by
-    `req`.  But, if there is an active distribution for the project and it
-    does *not* meet the `req` requirement, ``VersionConflict`` is raised.
-    If there is no active distribution for the requested project, ``None``
-    is returned.
-
-``resolve(requirements, env=None, installer=None)``
-    List all distributions needed to (recursively) meet `requirements`
-
-    `requirements` must be a sequence of ``Requirement`` objects.  `env`,
-    if supplied, should be an ``Environment`` instance.  If
-    not supplied, an ``Environment`` is created from the working set's
-    ``entries``.  `installer`, if supplied, will be invoked with each
-    requirement that cannot be met by an already-installed distribution; it
-    should return a ``Distribution`` or ``None``.  (See the ``obtain()`` method
-    of `Environment Objects`_, below, for more information on the `installer`
-    argument.)
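-
-    A minimal sketch, resolving and then activating a hypothetical project::
-
-        import pkg_resources
-
-        needed = pkg_resources.working_set.resolve(
-            list(pkg_resources.parse_requirements("FooProject>=1.2"))
-        )
-        for dist in needed:
-            pkg_resources.working_set.add(dist)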
-
-``add(dist, entry=None)``
-    Add `dist` to working set, associated with `entry`
-
-    If `entry` is unspecified, it defaults to ``dist.location``.  On exit from
-    this routine, `entry` is added to the end of the working set's ``.entries``
-    (if it wasn't already present).
-
-    `dist` is only added to the working set if it's for a project that
-    doesn't already have a distribution active in the set.  If it's
-    successfully added, any callbacks registered with the ``subscribe()``
-    method will be called.  (See `Receiving Change Notifications`_, below.)
-
-    Note: ``add()`` is automatically called for you by the ``require()``
-    method, so you don't normally need to use this method directly.
-
-``entries``
-    This attribute represents a "shadow" ``sys.path``, primarily useful for
-    debugging.  If you are experiencing import problems, you should check
-    the global ``working_set`` object's ``entries`` against ``sys.path``, to
-    ensure that they match.  If they do not, then some part of your program
-    is manipulating ``sys.path`` without updating the ``working_set``
-    accordingly.  IMPORTANT NOTE: do not directly manipulate this attribute!
-    Setting it equal to ``sys.path`` will not fix your problem, any more than
-    putting black tape over an "engine warning" light will fix your car!  If
-    this attribute is out of sync with ``sys.path``, it's merely an *indicator*
-    of the problem, not the cause of it.
-
-
-Receiving Change Notifications
-------------------------------
-
-Extensible applications and frameworks may need to receive notification when
-a new distribution (such as a plug-in component) has been added to a working
-set.  This is what the ``subscribe()`` method and ``add_activation_listener()``
-function are for.
-
-``subscribe(callback)``
-    Invoke ``callback(distribution)`` once for each active distribution that is
-    in the set now, or gets added later.  Because the callback is invoked for
-    already-active distributions, you do not need to loop over the working set
-    yourself to deal with the existing items; just register the callback and
-    be prepared for the fact that it will be called immediately by this method.
-
-    Note that callbacks *must not* allow exceptions to propagate, or they will
-    interfere with the operation of other callbacks and possibly result in an
-    inconsistent working set state.  Callbacks should use a try/except block
-    to ignore, log, or otherwise process any errors, especially since the code
-    that caused the callback to be invoked is unlikely to be able to handle
-    the errors any better than the callback itself.
-
-``pkg_resources.add_activation_listener()`` is an alternate spelling of
-``pkg_resources.working_set.subscribe()``.
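-
-For example, a well-behaved listener might look like this (what it does with
-each distribution is up to the application)::
-
-    import pkg_resources
-
-    def on_activation(dist):
-        try:
-            print "activated", dist
-        except Exception:
-            pass    # callbacks must not let exceptions propagate
-
-    pkg_resources.add_activation_listener(on_activation)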
-
-
-Locating Plugins
-----------------
-
-Extensible applications will sometimes have a "plugin directory" or a set of
-plugin directories, from which they want to load entry points or other
-metadata.  The ``find_plugins()`` method allows you to do this, by scanning an
-environment for the newest version of each project that can be safely loaded
-without conflicts or missing requirements.
-
-``find_plugins(plugin_env, full_env=None, fallback=True)``
-   Scan `plugin_env` and identify which distributions could be added to this
-   working set without version conflicts or missing requirements.
-
-   Example usage::
-
-       distributions, errors = working_set.find_plugins(
-           Environment(plugin_dirlist)
-       )
-       map(working_set.add, distributions)  # add plugins+libs to sys.path
-       print "Couldn't load", errors        # display errors
-
-   The `plugin_env` should be an ``Environment`` instance that contains only
-   distributions that are in the project's "plugin directory" or directories.
-   The `full_env`, if supplied, should be an ``Environment`` instance that
-   contains all currently-available distributions.
-
-   If `full_env` is not supplied, one is created automatically from the
-   ``WorkingSet`` this method is called on, which will typically mean that
-   every directory on ``sys.path`` will be scanned for distributions.
-
-   This method returns a 2-tuple: (`distributions`, `error_info`), where
-   `distributions` is a list of the distributions found in `plugin_env` that
-   were loadable, along with any other distributions that are needed to resolve
-   their dependencies.  `error_info` is a dictionary mapping unloadable plugin
-   distributions to an exception instance describing the error that occurred.
-   Usually this will be a ``DistributionNotFound`` or ``VersionConflict``
-   instance.
-
-   Most applications will use this method mainly on the master ``working_set``
-   instance in ``pkg_resources``, and then immediately add the returned
-   distributions to the working set so that they are available on sys.path.
-   This will make it possible to find any entry points, and allow any other
-   metadata tracking and hooks to be activated.
-
-   The resolution algorithm used by ``find_plugins()`` is as follows.  First,
-   the project names of the distributions present in `plugin_env` are sorted.
-   Then, each project's eggs are tried in descending version order (i.e.,
-   newest version first).
-
-   An attempt is made to resolve each egg's dependencies. If the attempt is
-   successful, the egg and its dependencies are added to the output list and to
-   a temporary copy of the working set.  The resolution process continues with
-   the next project name, and no older eggs for that project are tried.
-
-   If the resolution attempt fails, however, the error is added to the error
-   dictionary.  If the `fallback` flag is true, the next older version of the
-   plugin is tried, until a working version is found.  If false, the resolution
-   process continues with the next plugin project name.
-
-   Some applications may have stricter fallback requirements than others. For
-   example, an application that has a database schema or persistent objects
-   may not be able to safely downgrade a version of a package. Others may want
-   to ensure that a new plugin configuration is either 100% good or else
-   revert to a known-good configuration.  (That is, they may wish to revert to
-   a known configuration if the `error_info` return value is non-empty.)
-
-   Note that this algorithm gives precedence to satisfying the dependencies of
-   alphabetically prior project names in case of version conflicts. If two
-   projects named "AaronsPlugin" and "ZekesPlugin" both need different versions
-   of "TomsLibrary", then "AaronsPlugin" will win and "ZekesPlugin" will be
-   disabled due to version conflict.
-
-
-``Environment`` Objects
-=======================
-
-An "environment" is a collection of ``Distribution`` objects, usually ones
-that are present and potentially importable on the current platform.
-``Environment`` objects are used by ``pkg_resources`` to index available
-distributions during dependency resolution.
-
-``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)``
-    Create an environment snapshot by scanning `search_path` for distributions
-    compatible with `platform` and `python`.  `search_path` should be a
-    sequence of strings such as might be used on ``sys.path``.  If a
-    `search_path` isn't supplied, ``sys.path`` is used.
-
-    `platform` is an optional string specifying the name of the platform
-    that platform-specific distributions must be compatible with.  If
-    unspecified, it defaults to the current platform.  `python` is an
-    optional string naming the desired version of Python (e.g. ``'2.4'``);
-    it defaults to the currently-running version.
-
-    You may explicitly set `platform` (and/or `python`) to ``None`` if you
-    wish to include *all* distributions, not just those compatible with the
-    running platform or Python version.
-
-    Note that `search_path` is scanned immediately for distributions, and the
-    resulting ``Environment`` is a snapshot of the found distributions.  It
-    is not automatically updated if the system's state changes due to e.g.
-    installation or removal of distributions.
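-
-    For example (the directory name is hypothetical)::
-
-        from pkg_resources import Environment
-
-        env = Environment(['/opt/myapp/plugins'])
-        for project_name in env:
-            newest = env[project_name][0]    # newest version listed first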
-
-``__getitem__(project_name)``
-    Returns a list of distributions for the given project name, ordered
-    from newest to oldest version.  (And highest to lowest format precedence
-    for distributions that contain the same version of the project.)  If there
-    are no distributions for the project, returns an empty list.
-
-``__iter__()``
-    Yield the unique project names of the distributions in this environment.
-    The yielded names are always in lower case.
-
-``add(dist)``
-    Add `dist` to the environment if it matches the platform and python version
-    specified at creation time, and only if the distribution hasn't already
-    been added. (i.e., adding the same distribution more than once is a no-op.)
-
-``remove(dist)``
-    Remove `dist` from the environment.
-
-``can_add(dist)``
-    Is distribution `dist` acceptable for this environment?  If it's not
-    compatible with the ``platform`` and ``python`` version values specified
-    when the environment was created, a false value is returned.
-
-``__add__(dist_or_env)``  (``+`` operator)
-    Add a distribution or environment to an ``Environment`` instance, returning
-    a *new* environment object that contains all the distributions previously
-    contained by both.  The new environment will have a ``platform`` and
-    ``python`` of ``None``, meaning that it will not reject any distributions
-    from being added to it; it will simply accept whatever is added.  If you
-    want the added items to be filtered for platform and Python version, or
-    you want to add them to the *same* environment instance, you should use
-    in-place addition (``+=``) instead.
-
-``__iadd__(dist_or_env)``  (``+=`` operator)
-    Add a distribution or environment to an ``Environment`` instance
-    *in-place*, updating the existing instance and returning it.  The
-    ``platform`` and ``python`` filter attributes take effect, so distributions
-    in the source that do not have a suitable platform string or Python version
-    are silently ignored.
-
-``best_match(req, working_set, installer=None)``
-    Find distribution best matching `req` and usable on `working_set`
-
-    This calls the ``find(req)`` method of the `working_set` to see if a
-    suitable distribution is already active.  (This may raise
-    ``VersionConflict`` if an unsuitable version of the project is already
-    active in the specified `working_set`.)  If a suitable distribution isn't
-    active, this method returns the newest distribution in the environment
-    that meets the ``Requirement`` in `req`.  If no suitable distribution is
-    found, and `installer` is supplied, then the result of calling
-    the environment's ``obtain(req, installer)`` method will be returned.
-
-``obtain(requirement, installer=None)``
-    Obtain a distro that matches requirement (e.g. via download).  In the
-    base ``Environment`` class, this routine just returns
-    ``installer(requirement)``, unless `installer` is None, in which case
-    None is returned instead.  This method is a hook that allows subclasses
-    to attempt other ways of obtaining a distribution before falling back
-    to the `installer` argument.
-
-``scan(search_path=None)``
-    Scan `search_path` for distributions usable on `platform`
-
-    Any distributions found are added to the environment.  `search_path` should
-    be a sequence of strings such as might be used on ``sys.path``.  If not
-    supplied, ``sys.path`` is used.  Only distributions conforming to
-    the platform/python version defined at initialization are added.  This
-    method is a shortcut for using the ``find_distributions()`` function to
-    find the distributions from each item in `search_path`, and then calling
-    ``add()`` to add each one to the environment.
-
-
-``Requirement`` Objects
-=======================
-
-``Requirement`` objects express what versions of a project are suitable for
-some purpose.  These objects (or their string form) are used by various
-``pkg_resources`` APIs in order to find distributions that a script or
-distribution needs.
-
-
-Requirements Parsing
---------------------
-
-``parse_requirements(s)``
-    Yield ``Requirement`` objects for a string or iterable of lines.  Each
-    requirement must start on a new line.  See below for syntax.
-
-``Requirement.parse(s)``
-    Create a ``Requirement`` object from a string or iterable of lines.  A
-    ``ValueError`` is raised if the string or lines do not contain a valid
-    requirement specifier, or if they contain more than one specifier.  (To
-    parse multiple specifiers from a string or iterable of strings, use
-    ``parse_requirements()`` instead.)
-
-    The syntax of a requirement specifier can be defined in EBNF as follows::
-
-        requirement  ::= project_name versionspec? extras?
-        versionspec  ::= comparison version (',' comparison version)*
-        comparison   ::= '<' | '<=' | '!=' | '==' | '>=' | '>'
-        extras       ::= '[' extralist? ']'
-        extralist    ::= identifier (',' identifier)*
-        project_name ::= identifier
-        identifier   ::= [-A-Za-z0-9_]+
-        version      ::= [-A-Za-z0-9_.]+
-
-    Tokens can be separated by whitespace, and a requirement can be continued
-    over multiple lines using a backslash (``\\``).  Line-end comments (using
-    ``#``) are also allowed.
-
-    Some examples of valid requirement specifiers::
-
-        FooProject >= 1.2
-        Fizzy [foo, bar]
-        PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1
-        SomethingWhoseVersionIDontCareAbout
-
-    The project name is the only required portion of a requirement string, and
-    if it's the only thing supplied, the requirement will accept any version
-    of that project.
-
-    The "extras" in a requirement are used to request optional features of a
-    project, that may require additional project distributions in order to
-    function.  For example, if the hypothetical "Report-O-Rama" project offered
-    optional PDF support, it might require an additional library in order to
-    provide that support.  Thus, a project needing Report-O-Rama's PDF features
-    could use a requirement of ``Report-O-Rama[PDF]`` to request installation
-    or activation of both Report-O-Rama and any libraries it needs in order to
-    provide PDF support.  For example, you could use::
-
-        easy_install.py Report-O-Rama[PDF]
-
-    To install the necessary packages using the EasyInstall program, or call
-    ``pkg_resources.require('Report-O-Rama[PDF]')`` to add the necessary
-    distributions to sys.path at runtime.
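-
-    For example, the specifiers shown above can be turned into ``Requirement``
-    objects at runtime (a minimal sketch)::
-
-        from pkg_resources import Requirement, parse_requirements
-
-        req = Requirement.parse("FooProject >= 1.2")
-        # req.project_name is 'FooProject'; req.specs is [('>=', '1.2')]
-
-        reqs = list(parse_requirements(["FooProject >= 1.2", "Fizzy [foo, bar]"]))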
-
-
-``Requirement`` Methods and Attributes
---------------------------------------
-
-``__contains__(dist_or_version)``
-    Return true if `dist_or_version` fits the criteria for this requirement.
-    If `dist_or_version` is a ``Distribution`` object, its project name must
-    match the requirement's project name, and its version must meet the
-    requirement's version criteria.  If `dist_or_version` is a string, it is
-    parsed using the ``parse_version()`` utility function.  Otherwise, it is
-    assumed to be an already-parsed version.
-
-    The ``Requirement`` object's version specifiers (``.specs``) are internally
-    sorted into ascending version order, and used to establish what ranges of
-    versions are acceptable.  Adjacent redundant conditions are effectively
-    consolidated (e.g. ``">1, >2"`` produces the same results as ``">1"``, and
-    ``"<2,<3"`` produces the same results as ``"<3"``). ``"!="`` versions are
-    excised from the ranges they fall within.  The version being tested for
-    acceptability is then checked for membership in the resulting ranges.
-    (Note that providing conflicting conditions for the same version (e.g.
-    ``"<2,>=2"`` or ``"==2,!=2"``) is meaningless and may therefore produce
-    bizarre results when compared with actual version number(s).)
-
-``__eq__(other_requirement)``
-    A requirement compares equal to another requirement if they have
-    case-insensitively equal project names, version specifiers, and "extras".
-    (The order that extras and version specifiers are in is also ignored.)
-    Equal requirements also have equal hashes, so that requirements can be
-    used in sets or as dictionary keys.
-
-``__str__()``
-    The string form of a ``Requirement`` is a string that, if passed to
-    ``Requirement.parse()``, would return an equal ``Requirement`` object.
-
-``project_name``
-    The name of the required project
-
-``key``
-    An all-lowercase version of the ``project_name``, useful for comparison
-    or indexing.
-
-``extras``
-    A tuple of names of "extras" that this requirement calls for.  (These will
-    be all-lowercase and normalized using the ``safe_extra()`` parsing utility
-    function, so they may not exactly equal the extras the requirement was
-    created with.)
-
-``specs``
-    A list of ``(op,version)`` tuples, sorted in ascending parsed-version
-    order.  The `op` in each tuple is a comparison operator, represented as
-    a string.  The `version` is the (unparsed) version number.  The relative
-    order of tuples containing the same version numbers is undefined, since
-    having more than one operator for a given version is either redundant or
-    self-contradictory.
-
-
-Entry Points
-============
-
-Entry points are a simple way for distributions to "advertise" Python objects
-(such as functions or classes) for use by other distributions.  Extensible
-applications and frameworks can search for entry points with a particular name
-or group, either from a specific distribution or from all active distributions
-on sys.path, and then inspect or load the advertised objects at will.
-
-Entry points belong to "groups" which are named with a dotted name similar to
-a Python package or module name.  For example, the ``setuptools`` package uses
-an entry point named ``distutils.commands`` in order to find commands defined
-by distutils extensions.  ``setuptools`` treats the names of entry points
-defined in that group as the acceptable commands for a setup script.
-
-In a similar way, other packages can define their own entry point groups,
-either using dynamic names within the group (like ``distutils.commands``), or
-possibly using predefined names within the group.  For example, a blogging
-framework that offers various pre- or post-publishing hooks might define an
-entry point group and look for entry points named "pre_process" and
-"post_process" within that group.
-
-To advertise an entry point, a project needs to use ``setuptools`` and provide
-an ``entry_points`` argument to ``setup()`` in its setup script, so that the
-entry points will be included in the distribution's metadata.  For more
-details, see the ``setuptools`` documentation.  (XXX link here to setuptools)
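-
-For illustration only (the project and command names are hypothetical), a
-distutils extension advertising a ``distutils.commands`` entry point might
-include something like this in its setup script::
-
-    setup(
-        # ...
-        entry_points={
-            "distutils.commands": [
-                "my_command = myproject.commands:MyCommand",
-            ],
-        },
-    )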
-
-Each project distribution can advertise at most one entry point of a given
-name within the same entry point group.  For example, a distutils extension
-could advertise two different ``distutils.commands`` entry points, as long as
-they had different names.  However, there is nothing that prevents *different*
-projects from advertising entry points of the same name in the same group.  In
-some cases, this is a desirable thing, since the application or framework that
-uses the entry points may be calling them as hooks, or in some other way
-combining them.  It is up to the application or framework to decide what to do
-if multiple distributions advertise an entry point; some possibilities include
-using both entry points, displaying an error message, using the first one found
-in sys.path order, etc.
-
-
-Convenience API
----------------
-
-In the following functions, the `dist` argument can be a ``Distribution``
-instance, a ``Requirement`` instance, or a string specifying a requirement
-(i.e. project name, version, etc.).  If the argument is a string or
-``Requirement``, the specified distribution is located (and added to sys.path
-if not already present).  An error will be raised if a matching distribution is
-not available.
-
-The `group` argument should be a string containing a dotted identifier,
-identifying an entry point group.  If you are defining an entry point group,
-you should include some portion of your package's name in the group name so as
-to avoid collision with other packages' entry point groups.
-
-``load_entry_point(dist, group, name)``
-    Load the named entry point from the specified distribution, or raise
-    ``ImportError``.
-
-``get_entry_info(dist, group, name)``
-    Return an ``EntryPoint`` object for the given `group` and `name` from
-    the specified distribution.  Returns ``None`` if the distribution has not
-    advertised a matching entry point.
-
-``get_entry_map(dist, group=None)``
-    Return the distribution's entry point map for `group`, or the full entry
-    map for the distribution.  This function always returns a dictionary,
-    even if the distribution advertises no entry points.  If `group` is given,
-    the dictionary maps entry point names to the corresponding ``EntryPoint``
-    object.  If `group` is None, the dictionary maps group names to
-    dictionaries that then map entry point names to the corresponding
-    ``EntryPoint`` instance in that group.
-
-``iter_entry_points(group, name=None)``
-    Yield entry point objects from `group` matching `name`.
-
-    If `name` is None, yields all entry points in `group` from all
-    distributions in the working set on sys.path, otherwise only ones matching
-    both `group` and `name` are yielded.  Entry points are yielded from
-    the active distributions in the order that the distributions appear on
-    sys.path.  (Within entry points for a particular distribution, however,
-    there is no particular ordering.)
-
-    (This API is actually a method of the global ``working_set`` object; see
-    the section above on `Basic WorkingSet Methods`_ for more information.)
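-
-For example, a tool could inspect and load advertised objects like this (the
-project and entry point names are hypothetical)::
-
-    import pkg_resources
-
-    # full entry point map: {group_name: {entry_name: EntryPoint, ...}, ...}
-    emap = pkg_resources.get_entry_map("FooProject")
-
-    # load one specific advertised object, raising ImportError on failure
-    main = pkg_resources.load_entry_point("FooProject", "console_scripts", "foo")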
-
-
-Creating and Parsing
---------------------
-
-``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)``
-    Create an ``EntryPoint`` instance.  `name` is the entry point name.  The
-    `module_name` is the (dotted) name of the module containing the advertised
-    object.  `attrs` is an optional tuple of names to look up from the
-    module to obtain the advertised object.  For example, an `attrs` of
-    ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the
-    advertised object could be obtained by the following code::
-
-        import baz
-        advertised_object = baz.foo.bar
-
-    The `extras` are an optional tuple of "extra feature" names that the
-    distribution needs in order to provide this entry point.  When the
-    entry point is loaded, these extra features are looked up in the `dist`
-    argument to find out what other distributions may need to be activated
-    on sys.path; see the ``load()`` method for more details.  The `extras`
-    argument is only meaningful if `dist` is specified.  `dist` must be
-    a ``Distribution`` instance.
-
-``EntryPoint.parse(src, dist=None)`` (classmethod)
-    Parse a single entry point from string `src`
-
-    Entry point syntax follows the form::
-
-        name = some.module:some.attr [extra1,extra2]
-
-    The entry name and module name are required, but the ``:attrs`` and
-    ``[extras]`` parts are optional, as is the whitespace shown between
-    some of the items.  The `dist` argument is passed through to the
-    ``EntryPoint()`` constructor, along with the other values parsed from
-    `src`.
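-
-    For example (the module and attribute names are hypothetical)::
-
-        from pkg_resources import EntryPoint
-
-        ep = EntryPoint.parse("foo = myproject.cli:main")
-        # ep.name is 'foo'; ep.module_name is 'myproject.cli'; ep.attrs
-        # names the attribute path used to reach the advertised object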
-
-``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod)
-    Parse `lines` (a string or sequence of lines) to create a dictionary
-    mapping entry point names to ``EntryPoint`` objects.  ``ValueError`` is
-    raised if entry point names are duplicated, if `group` is not a valid
-    entry point group name, or if there are any syntax errors.  (Note: the
-    `group` parameter is used only for validation and to create more
-    informative error messages.)  If `dist` is provided, it will be used to
-    set the ``dist`` attribute of the created ``EntryPoint`` objects.
-
-``EntryPoint.parse_map(data, dist=None)`` (classmethod)
-    Parse `data` into a dictionary mapping group names to dictionaries mapping
-    entry point names to ``EntryPoint`` objects.  If `data` is a dictionary,
-    then the keys are used as group names and the values are passed to
-    ``parse_group()`` as the `lines` argument.  If `data` is a string or
-    sequence of lines, it is first split into .ini-style sections (using
-    the ``split_sections()`` utility function) and the section names are used
-    as group names.  In either case, the `dist` argument is passed through to
-    ``parse_group()`` so that the entry points will be linked to the specified
-    distribution.
-
-
-``EntryPoint`` Objects
-----------------------
-
-For simple introspection, ``EntryPoint`` objects have attributes that
-correspond exactly to the constructor argument names: ``name``,
-``module_name``, ``attrs``, ``extras``, and ``dist`` are all available.  In
-addition, the following methods are provided:
-
-``load(require=True, env=None, installer=None)``
-    Load the entry point, returning the advertised Python object, or raise
-    ``ImportError`` if it cannot be obtained.  If `require` is a true value,
-    then ``require(env, installer)`` is called before attempting the import.
-
-``require(env=None, installer=None)``
-    Ensure that any "extras" needed by the entry point are available on
-    sys.path.  ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``,
-    but no ``dist``, or if the named extras are not defined by the
-    distribution.  If `env` is supplied, it must be an ``Environment``, and it
-    will be used to search for needed distributions if they are not already
-    present on sys.path.  If `installer` is supplied, it must be a callable
-    taking a ``Requirement`` instance and returning a matching importable
-    ``Distribution`` instance or None.
-
-``__str__()``
-    The string form of an ``EntryPoint`` is a string that could be passed to
-    ``EntryPoint.parse()`` to produce an equivalent ``EntryPoint``.
-
-
-``Distribution`` Objects
-========================
-
-``Distribution`` objects represent collections of Python code that may or may
-not be importable, and may or may not have metadata and resources associated
-with them.  Their metadata may include information such as what other projects
-the distribution depends on, what entry points the distribution advertises, and
-so on.
-
-
-Getting or Creating Distributions
----------------------------------
-
-Most commonly, you'll obtain ``Distribution`` objects from a ``WorkingSet`` or
-an ``Environment``.  (See the sections above on `WorkingSet Objects`_ and
-`Environment Objects`_, which are containers for active distributions and
-available distributions, respectively.)  You can also obtain ``Distribution``
-objects from one of these high-level APIs:
-
-``find_distributions(path_item, only=False)``
-    Yield distributions accessible via `path_item`.  If `only` is true, yield
-    only distributions whose ``location`` is equal to `path_item`.  In other
-    words, if `only` is true, this yields any distributions that would be
-    importable if `path_item` were on ``sys.path``.  If `only` is false, this
-    also yields distributions that are "in" or "under" `path_item`, but would
-    not be importable unless their locations were also added to ``sys.path``.
-
-``get_distribution(dist_spec)``
-    Return a ``Distribution`` object for a given ``Requirement`` or string.
-    If `dist_spec` is already a ``Distribution`` instance, it is returned.
-    If it is a ``Requirement`` object or a string that can be parsed into one,
-    it is used to locate and activate a matching distribution, which is then
-    returned.
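-
-For example (the directory and project names are hypothetical)::
-
-    import pkg_resources
-
-    # distributions found under a plugin directory, without activating them
-    for dist in pkg_resources.find_distributions('/opt/myapp/plugins'):
-        print dist.project_name, dist.version
-
-    # locate (and activate, if necessary) an already-installed project
-    dist = pkg_resources.get_distribution("FooProject>=1.2")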
-
-However, if you're creating specialized tools for working with distributions,
-or creating a new distribution format, you may also need to create
-``Distribution`` objects directly, using one of the three constructors below.
-
-These constructors all take an optional `metadata` argument, which is used to
-access any resources or metadata associated with the distribution.  `metadata`
-must be an object that implements the ``IResourceProvider`` interface, or None.
-If it is None, an ``EmptyProvider`` is used instead.  ``Distribution`` objects
-implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by
-delegating them to the `metadata` object.
-
-``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod)
-    Create a distribution for `location`, which must be a string such as a
-    URL, filename, or other string that might be used on ``sys.path``.
-    `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
-    If `basename` ends with ``.egg``, then the project's name, version, python
-    version and platform are extracted from the filename and used to set those
-    properties of the created distribution.  Any additional keyword arguments
-    are forwarded to the ``Distribution()`` constructor.
-
-``Distribution.from_filename(filename, metadata=None, **kw)`` (classmethod)
-    Create a distribution by parsing a local filename.  This is a shorter way
-    of saying ``Distribution.from_location(normalize_path(filename),
-    os.path.basename(filename), metadata)``.  In other words, it creates a
-    distribution whose location is the normalized form of the filename, parsing
-    name and version information from the base portion of the filename.  Any
-    additional keyword arguments are forwarded to the ``Distribution()``
-    constructor.
-
-``Distribution(location, metadata, project_name, version, py_version, platform, precedence)``
-    Create a distribution by setting its properties.  All arguments are
-    optional and default to None, except for `py_version` (which defaults to
-    the current Python version) and `precedence` (which defaults to
-    ``EGG_DIST``; for more details see ``precedence`` under `Distribution
-    Attributes`_ below).  Note that it's usually easier to use the
-    ``from_filename()`` or ``from_location()`` constructors than to specify
-    all these arguments individually.
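-
-For example, a tool that indexes egg files it finds on disk might use the
-``from_filename()`` constructor (the filename here is hypothetical)::
-
-    from pkg_resources import Distribution
-
-    dist = Distribution.from_filename("downloads/FooProject-1.2-py2.4.egg")
-    # project name, version, Python version and platform come from the filename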
-
-
-``Distribution`` Attributes
----------------------------
-
-location
-    A string indicating the distribution's location.  For an importable
-    distribution, this is the string that would be added to ``sys.path`` to
-    make it actively importable.  For non-importable distributions, this is
-    simply a filename, URL, or other way of locating the distribution.
-
-project_name
-    A string, naming the project that this distribution is for.  Project names
-    are defined by a project's setup script, and they are used to identify
-    projects on PyPI.  When a ``Distribution`` is constructed, the
-    `project_name` argument is passed through the ``safe_name()`` utility
-    function to filter out any unacceptable characters.
-
-key
-    ``dist.key`` is short for ``dist.project_name.lower()``.  It's used for
-    case-insensitive comparison and indexing of distributions by project name.
-
-extras
-    A list of strings, giving the names of extra features defined by the
-    project's dependency list (the ``extras_require`` argument specified in
-    the project's setup script).
-
-version
-    A string denoting what release of the project this distribution contains.
-    When a ``Distribution`` is constructed, the `version` argument is passed
-    through the ``safe_version()`` utility function to filter out any
-    unacceptable characters.  If no `version` is specified at construction
-    time, then attempting to access this attribute later will cause the
-    ``Distribution`` to try to discover its version by reading its ``PKG-INFO``
-    metadata file.  If ``PKG-INFO`` is unavailable or can't be parsed,
-    ``ValueError`` is raised.
-
-parsed_version
-    The ``parsed_version`` is a tuple representing a "parsed" form of the
-    distribution's ``version``.  ``dist.parsed_version`` is a shortcut for
-    calling ``parse_version(dist.version)``.  It is used to compare or sort
-    distributions by version.  (See the `Parsing Utilities`_ section below for
-    more information on the ``parse_version()`` function.)  Note that accessing
-    ``parsed_version`` may result in a ``ValueError`` if the ``Distribution``
-    was constructed without a `version` and without `metadata` capable of
-    supplying the missing version info.
-
-py_version
-    The major/minor Python version the distribution supports, as a string.
-    For example, "2.3" or "2.4".  The default is the current version of Python.
-
-platform
-    A string representing the platform the distribution is intended for, or
-    ``None`` if the distribution is "pure Python" and therefore cross-platform.
-    See `Platform Utilities`_ below for more information on platform strings.
-
-precedence
-    A distribution's ``precedence`` is used to determine the relative order of
-    two distributions that have the same ``project_name`` and
-    ``parsed_version``.  The default precedence is ``pkg_resources.EGG_DIST``,
-    which is the highest (i.e. most preferred) precedence.  The full list
-    of predefined precedences, from most preferred to least preferred, is:
-    ``EGG_DIST``, ``BINARY_DIST``, ``SOURCE_DIST``, ``CHECKOUT_DIST``, and
-    ``DEVELOP_DIST``.  Normally, precedences other than ``EGG_DIST`` are used
-    only by the ``setuptools.package_index`` module, when sorting distributions
-    found in a package index to determine their suitability for installation.
-    "System" and "Development" eggs (i.e., ones that use the ``.egg-info``
-    format), however, are automatically given a precedence of ``DEVELOP_DIST``.
-
-
-
-``Distribution`` Methods
-------------------------
-
-``activate(path=None)``
-    Ensure distribution is importable on `path`.  If `path` is None,
-    ``sys.path`` is used instead.  This ensures that the distribution's
-    ``location`` is in the `path` list, and it also performs any necessary
-    namespace package fixups or declarations.  (That is, if the distribution
-    contains namespace packages, this method ensures that they are declared,
-    and that the distribution's contents for those namespace packages are
-    merged with the contents provided by any other active distributions.  See
-    the section above on `Namespace Package Support`_ for more information.)
-
-    ``pkg_resources`` adds a notification callback to the global ``working_set``
-    that ensures this method is called whenever a distribution is added to it.
-    Therefore, you should not normally need to explicitly call this method.
-    (Note that this means that namespace packages on ``sys.path`` are always
-    imported as soon as ``pkg_resources`` is, which is another reason why
-    namespace packages should not contain any code or import statements.)
-
-``as_requirement()``
-    Return a ``Requirement`` instance that matches this distribution's project
-    name and version.
-
-``requires(extras=())``
-    List the ``Requirement`` objects that specify this distribution's
-    dependencies.  If `extras` is specified, it should be a sequence of names
-    of "extras" defined by the distribution, and the list returned will then
-    include any dependencies needed to support the named "extras".
-
-``clone(**kw)``
-    Create a copy of the distribution.  Any supplied keyword arguments override
-    the corresponding argument to the ``Distribution()`` constructor, allowing
-    you to change some of the copied distribution's attributes.
-
-``egg_name()``
-    Return what this distribution's standard filename should be, not including
-    the ".egg" extension.  For example, a distribution for project "Foo"
-    version 1.2 that runs on Python 2.3 for Windows would have an ``egg_name()``
-    of ``Foo-1.2-py2.3-win32``.  Any dashes in the name or version are
-    converted to underscores.  (``Distribution.from_location()`` will convert
-    them back when parsing a ".egg" file name.)
-
-``__cmp__(other)``, ``__hash__()``
-    Distribution objects are hashed and compared on the basis of their parsed
-    version and precedence, followed by their key (lowercase project name),
-    location, Python version, and platform.
-
-The following methods are used to access ``EntryPoint`` objects advertised
-by the distribution.  See the section above on `Entry Points`_ for more
-detailed information about these operations:
-
-``get_entry_info(group, name)``
-    Return the ``EntryPoint`` object for `group` and `name`, or None if no
-    such point is advertised by this distribution.
-
-``get_entry_map(group=None)``
-    Return the entry point map for `group`.  If `group` is None, return
-    a dictionary mapping group names to entry point maps for all groups.
-    (An entry point map is a dictionary of entry point names to ``EntryPoint``
-    objects.)
-
-``load_entry_point(group, name)``
-    Short for ``get_entry_info(group, name).load()``.  Returns the object
-    advertised by the named entry point, or raises ``ImportError`` if
-    the entry point isn't advertised by this distribution, or there is some
-    other import problem.
-
-In addition to the above methods, ``Distribution`` objects also implement all
-of the `IResourceProvider`_ and `IMetadataProvider Methods`_ (which are
-documented in later sections):
-
-* ``has_metadata(name)``
-* ``metadata_isdir(name)``
-* ``metadata_listdir(name)``
-* ``get_metadata(name)``
-* ``get_metadata_lines(name)``
-* ``run_script(script_name, namespace)``
-* ``get_resource_filename(manager, resource_name)``
-* ``get_resource_stream(manager, resource_name)``
-* ``get_resource_string(manager, resource_name)``
-* ``has_resource(resource_name)``
-* ``resource_isdir(resource_name)``
-* ``resource_listdir(resource_name)``
-
-If the distribution was created with a `metadata` argument, these resource and
-metadata access methods are all delegated to that `metadata` provider.
-Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution
-will appear to have no resources or metadata.  This delegation approach is used
-so that supporting custom importers or new distribution formats can be done
-simply by creating an appropriate `IResourceProvider`_ implementation; see the
-section below on `Supporting Custom Importers`_ for more details.
-
-
-``ResourceManager`` API
-=======================
-
-The ``ResourceManager`` class provides uniform access to package resources,
-whether those resources exist as files and directories or are compressed in
-an archive of some kind.
-
-Normally, you do not need to create or explicitly manage ``ResourceManager``
-instances, as the ``pkg_resources`` module creates a global instance for you,
-and makes most of its methods available as top-level names in the
-``pkg_resources`` module namespace.  So, for example, this code actually
-calls the ``resource_string()`` method of the global ``ResourceManager``::
-
-    import pkg_resources
-    my_data = pkg_resources.resource_string(__name__, "foo.dat")
-
-Thus, you can use the APIs below without needing an explicit
-``ResourceManager`` instance; just import and use them as needed.
-
-
-Basic Resource Access
----------------------
-
-In the following methods, the `package_or_requirement` argument may be either
-a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
-If it is a package or module name, the named module or package must be
-importable (i.e., be in a distribution or directory on ``sys.path``), and the
-`resource_name` argument is interpreted relative to the named package.  (Note
-that if a module name is used, then the resource name is relative to the
-package immediately containing the named module.  Also, you should not use
-a namespace package name, because a namespace package can be spread across
-multiple distributions, and is therefore ambiguous as to which distribution
-should be searched for the resource.)
-
-If it is a ``Requirement``, then the requirement is automatically resolved
-(searching the current ``Environment`` if necessary) and a matching
-distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
-already present.  (Unless the ``Requirement`` can't be satisfied, in which
-case an exception is raised.)  The `resource_name` argument is then interpreted
-relative to the root of the identified distribution; i.e. its first path
-segment will be treated as a peer of the top-level modules or packages in the
-distribution.
-
-Note that resource names must be ``/``-separated paths and cannot be absolute
-(i.e. no leading ``/``) or contain relative names like ``".."``.  Do *not* use
-``os.path`` routines to manipulate resource paths, as they are *not* filesystem
-paths.
-
-``resource_exists(package_or_requirement, resource_name)``
-    Does the named resource exist?  Return ``True`` or ``False`` accordingly.
-
-``resource_stream(package_or_requirement, resource_name)``
-    Return a readable file-like object for the specified resource; it may be
-    an actual file, a ``StringIO``, or some similar object.  The stream is
-    in "binary mode", in the sense that whatever bytes are in the resource
-    will be read as-is.
-
-``resource_string(package_or_requirement, resource_name)``
-    Return the specified resource as a string.  The resource is read in
-    binary fashion, such that the returned string contains exactly the bytes
-    that are stored in the resource.
-
-``resource_isdir(package_or_requirement, resource_name)``
-    Is the named resource a directory?  Return ``True`` or ``False``
-    accordingly.
-
-``resource_listdir(package_or_requirement, resource_name)``
-    List the contents of the named resource directory, just like ``os.listdir``
-    except that it works even if the resource is in a zipfile.
-
-Note that only ``resource_exists()`` and ``resource_isdir()`` are insensitive
-as to the resource type.  You cannot use ``resource_listdir()`` on a file
-resource, and you can't use ``resource_string()`` or ``resource_stream()`` on
-directory resources.  Using an inappropriate method for the resource type may
-result in an exception or undefined behavior, depending on the platform and
-distribution format involved.
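-
-For example, here is a hedged sketch of the ``Requirement`` form of the first
-argument (the project name ``SomeProject`` and the resource path
-``data/defaults.cfg`` are purely illustrative)::
-
-    from pkg_resources import Requirement, resource_exists, resource_string
-
-    req = Requirement.parse("SomeProject>=1.0")
-    if resource_exists(req, "data/defaults.cfg"):
-        # read the raw bytes of a data file shipped at the distribution root
-        defaults = resource_string(req, "data/defaults.cfg")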
-
-
-Resource Extraction
--------------------
-
-``resource_filename(package_or_requirement, resource_name)``
-    Sometimes, it is not sufficient to access a resource in string or stream
-    form, and a true filesystem filename is needed.  In such cases, you can
-    use this method (or module-level function) to obtain a filename for a
-    resource.  If the resource is in an archive distribution (such as a zipped
-    egg), it will be extracted to a cache directory, and the filename within
-    the cache will be returned.  If the named resource is a directory, then
-    all resources within that directory (including subdirectories) are also
-    extracted.  If the named resource is a C extension or "eager resource"
-    (see the ``setuptools`` documentation for details), then all C extensions
-    and eager resources are extracted at the same time.
-
-    Archived resources are extracted to a cache location that can be managed by
-    the following two methods:
-
-``set_extraction_path(path)``
-    Set the base path where resources will be extracted to, if needed.
-
-    If you do not call this routine before any extractions take place, the
-    path defaults to the return value of ``get_default_cache()``.  (Which is
-    based on the ``PYTHON_EGG_CACHE`` environment variable, with various
-    platform-specific fallbacks.  See that routine's documentation for more
-    details.)
-
-    Resources are extracted to subdirectories of this path based upon
-    information given by the resource provider.  You may set this to a
-    temporary directory, but then you must call ``cleanup_resources()`` to
-    delete the extracted files when done.  There is no guarantee that
-    ``cleanup_resources()`` will be able to remove all extracted files.  (On
-    Windows, for example, you can't unlink .pyd or .dll files that are still
-    in use.)
-
-    Note that you may not change the extraction path for a given resource
-    manager once resources have been extracted, unless you first call
-    ``cleanup_resources()``.
-
-``cleanup_resources(force=False)``
-    Delete all extracted resource files and directories, returning a list
-    of the file and directory names that could not be successfully removed.
-    This function does not have any concurrency protection, so it should
-    generally only be called when the extraction path is a temporary
-    directory exclusive to a single process.  This method is not
-    automatically called; you must call it explicitly or register it as an
-    ``atexit`` function if you wish to ensure cleanup of a temporary
-    directory used for extractions.
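-
-As a minimal sketch (the use of ``tempfile`` and ``atexit`` here is just one
-reasonable choice, not something required by ``pkg_resources``), a program
-that wants a private, self-cleaning extraction directory might do::
-
-    import atexit
-    import tempfile
-
-    import pkg_resources
-
-    pkg_resources.set_extraction_path(tempfile.mkdtemp(prefix="eggs-"))
-    atexit.register(pkg_resources.cleanup_resources)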
-
-
-"Provider" Interface
---------------------
-
-If you are implementing an ``IResourceProvider`` and/or ``IMetadataProvider``
-for a new distribution archive format, you may need to use the following
-``IResourceManager`` methods to co-ordinate extraction of resources to the
-filesystem.  If you're not implementing an archive format, however, you have
-no need to use these methods.  Unlike the other methods listed above, they are
-*not* available as top-level functions tied to the global ``ResourceManager``;
-you must therefore have an explicit ``ResourceManager`` instance to use them.
-
-``get_cache_path(archive_name, names=())``
-    Return absolute location in cache for `archive_name` and `names`
-
-    The parent directory of the resulting path will be created if it does
-    not already exist.  `archive_name` should be the base filename of the
-    enclosing egg (which may not be the name of the enclosing zipfile!),
-    including its ".egg" extension.  `names`, if provided, should be a
-    sequence of path name parts "under" the egg's extraction location.
-
-    This method should only be called by resource providers that need to
-    obtain an extraction location, and only for names they intend to
-    extract, as it tracks the generated names for possible cleanup later.
-
-``extraction_error()``
-    Raise an ``ExtractionError`` describing the active exception as interfering
-    with the extraction process.  You should call this if you encounter any
-    OS errors extracting the file to the cache path; it will format the
-    operating system exception for you, and add other information to the
-    ``ExtractionError`` instance that may be needed by programs that want to
-    wrap or handle extraction errors themselves.
-
-``postprocess(tempname, filename)``
-    Perform any platform-specific postprocessing of `tempname`.
-    Resource providers should call this method ONLY after successfully
-    extracting a compressed resource.  They must NOT call it on resources
-    that are already in the filesystem.
-
-    `tempname` is the current (temporary) name of the file, and `filename`
-    is the name it will be renamed to by the caller after this routine
-    returns.
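-
-To illustrate how these methods fit together, here is a rough sketch (not the
-actual ``ZipProvider`` code) of how an archive-based provider might implement
-extraction; the ``self._read_bytes()`` helper and the ``"my_archive.egg"``
-name are hypothetical::
-
-    import os
-
-    def _extract(self, manager, zip_path):
-        # method of a hypothetical archive-based provider class
-        real_path = manager.get_cache_path("my_archive.egg",
-                                           zip_path.split('/'))
-        if os.path.isfile(real_path):
-            return real_path                    # already extracted earlier
-        tmp_path = real_path + '.tmp'
-        try:
-            f = open(tmp_path, 'wb')
-            try:
-                f.write(self._read_bytes(zip_path))
-            finally:
-                f.close()
-            manager.postprocess(tmp_path, real_path)
-            os.rename(tmp_path, real_path)
-        except OSError:
-            manager.extraction_error()          # wraps and re-raises
-        return real_path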
-
-
-Metadata API
-============
-
-The metadata API is used to access metadata resources bundled in a pluggable
-distribution.  Metadata resources are virtual files or directories containing
-information about the distribution, such as might be used by an extensible
-application or framework to connect "plugins".  Like other kinds of resources,
-metadata resource names are ``/``-separated and should not contain ``..`` or
-begin with a ``/``.  You should not use ``os.path`` routines to manipulate
-resource paths.
-
-The metadata API is provided by objects implementing the ``IMetadataProvider``
-or ``IResourceProvider`` interfaces.  ``Distribution`` objects implement this
-interface, as do objects returned by the ``get_provider()`` function:
-
-``get_provider(package_or_requirement)``
-    If a package name is supplied, return an ``IResourceProvider`` for the
-    package.  If a ``Requirement`` is supplied, resolve it by returning a
-    ``Distribution`` from the current working set (searching the current
-    ``Environment`` if necessary and adding the newly found ``Distribution``
-    to the working set).  If the named package can't be imported, or the
-    ``Requirement`` can't be satisfied, an exception is raised.
-
-    NOTE: if you use a package name rather than a ``Requirement``, the object
-    you get back may not be a pluggable distribution, depending on the method
-    by which the package was installed.  In particular, "development" packages
-    and "single-version externally-managed" packages do not have any way to
-    map from a package name to the corresponding project's metadata.  Do not
-    write code that passes a package name to ``get_provider()`` and then tries
-    to retrieve project metadata from the returned object.  It may appear to
-    work when the named package is in an ``.egg`` file or directory, but
-    it will fail in other installation scenarios.  If you want project
-    metadata, you need to ask for a *project*, not a package.
-
-
-``IMetadataProvider`` Methods
------------------------------
-
-The methods provided by objects (such as ``Distribution`` instances) that
-implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are:
-
-``has_metadata(name)``
-    Does the named metadata resource exist?
-
-``metadata_isdir(name)``
-    Is the named metadata resource a directory?
-
-``metadata_listdir(name)``
-    List of metadata names in the directory (like ``os.listdir()``)
-
-``get_metadata(name)``
-    Return the named metadata resource as a string.  The data is read in binary
-    mode; i.e., the exact bytes of the resource file are returned.
-
-``get_metadata_lines(name)``
-    Yield named metadata resource as list of non-blank non-comment lines.  This
-    is short for calling ``yield_lines(provider.get_metadata(name))``.  See the
-    section on `yield_lines()`_ below for more information on the syntax it
-    recognizes.
-
-``run_script(script_name, namespace)``
-    Execute the named script in the supplied namespace dictionary.  Raises
-    ``ResolutionError`` if there is no script by that name in the ``scripts``
-    metadata directory.  `namespace` should be a Python dictionary, usually
-    a module dictionary if the script is being run as a module.
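-
-A small sketch of reading a distribution's own metadata (``SomeProject`` and
-the ``top_level.txt`` metadata name are illustrative; which metadata files
-exist depends on how the distribution was built and installed)::
-
-    from pkg_resources import Requirement, get_provider
-
-    provider = get_provider(Requirement.parse("SomeProject"))
-    if provider.has_metadata("top_level.txt"):
-        for line in provider.get_metadata_lines("top_level.txt"):
-            print(line)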
-
-
-Exceptions
-==========
-
-``pkg_resources`` provides a simple exception hierarchy for problems that may
-occur when processing requests to locate and activate packages::
-
-    ResolutionError
-        DistributionNotFound
-        VersionConflict
-        UnknownExtra
-
-    ExtractionError
-
-``ResolutionError``
-    This class is used as a base class for the other three exceptions, so that
-    you can catch all of them with a single "except" clause.  It is also raised
-    directly for miscellaneous requirement-resolution problems like trying to
-    run a script that doesn't exist in the distribution it was requested from.
-
-``DistributionNotFound``
-    A distribution needed to fulfill a requirement could not be found.
-
-``VersionConflict``
-    The requested version of a project conflicts with an already-activated
-    version of the same project.
-
-``UnknownExtra``
-    One of the "extras" requested was not recognized by the distribution it
-    was requested from.
-
-``ExtractionError``
-    A problem occurred extracting a resource to the Python Egg cache.  The
-    following attributes are available on instances of this exception:
-
-    manager
-        The resource manager that raised this exception
-
-    cache_path
-        The base directory for resource extraction
-
-    original_error
-        The exception instance that caused extraction to fail
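-
-For example, resolution problems around ``require()`` can be handled
-individually, or all at once via the common base class (the project name
-here is hypothetical)::
-
-    import pkg_resources
-    from pkg_resources import DistributionNotFound, VersionConflict
-
-    try:
-        pkg_resources.require("SomeProject>=1.0")
-    except DistributionNotFound:
-        print("SomeProject is not installed")
-    except VersionConflict:
-        print("an incompatible version of SomeProject is already active")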
-
-
-Supporting Custom Importers
-===========================
-
-By default, ``pkg_resources`` supports normal filesystem imports, and
-``zipimport`` importers.  If you wish to use the ``pkg_resources`` features
-with other (PEP 302-compatible) importers or module loaders, you may need to
-register various handlers and support functions using these APIs:
-
-``register_finder(importer_type, distribution_finder)``
-    Register `distribution_finder` to find distributions in ``sys.path`` items.
-    `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path``
-    item handler), and `distribution_finder` is a callable that, when passed a
-    path item, the importer instance, and an `only` flag, yields
-    ``Distribution`` instances found under that path item.  (The `only` flag,
-    if true, means the finder should yield only ``Distribution`` objects whose
-    ``location`` is equal to the path item provided.)
-
-    See the source of the ``pkg_resources.find_on_path`` function for an
-    example finder function.
-
-``register_loader_type(loader_type, provider_factory)``
-    Register `provider_factory` to make ``IResourceProvider`` objects for
-    `loader_type`.  `loader_type` is the type or class of a PEP 302
-    ``module.__loader__``, and `provider_factory` is a function that, when
-    passed a module object, returns an `IResourceProvider`_ for that module,
-    allowing it to be used with the `ResourceManager API`_.
-
-``register_namespace_handler(importer_type, namespace_handler)``
-    Register `namespace_handler` to declare namespace packages for the given
-    `importer_type`.  `importer_type` is the type or class of a PEP 302
-    "importer" (sys.path item handler), and `namespace_handler` is a callable
-    with a signature like this::
-
-        def namespace_handler(importer, path_entry, moduleName, module):
-            # return a path_entry to use for child packages
-
-    Namespace handlers are only called if the relevant importer object has
-    already agreed that it can handle the relevant path item.  The handler
-    should only return a subpath if the module ``__path__`` does not already
-    contain an equivalent subpath.  Otherwise, it should return None.
-
-    For an example namespace handler, see the source of the
-    ``pkg_resources.file_ns_handler`` function, which is used for both zipfile
-    importing and regular importing.
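-
-As a minimal sketch of ``register_loader_type()``, suppose ``MyLoader`` is a
-(hypothetical) PEP 302 loader whose modules live as ordinary files on the
-filesystem; in that case the existing ``DefaultProvider`` class can serve as
-the provider factory, since its constructor accepts a module object::
-
-    import pkg_resources
-
-    class MyLoader(object):
-        """Stand-in for a real PEP 302 loader type."""
-
-    pkg_resources.register_loader_type(MyLoader, pkg_resources.DefaultProvider)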
-
-
-IResourceProvider
------------------
-
-``IResourceProvider`` is an abstract class that documents what methods are
-required of objects returned by a `provider_factory` registered with
-``register_loader_type()``.  ``IResourceProvider`` is a subclass of
-``IMetadataProvider``, so objects that implement this interface must also
-implement all of the `IMetadataProvider Methods`_ as well as the methods
-shown here.  The `manager` argument to the methods below must be an object
-that supports the full `ResourceManager API`_ documented above.
-
-``get_resource_filename(manager, resource_name)``
-    Return a true filesystem path for `resource_name`, co-ordinating the
-    extraction with `manager`, if the resource must be unpacked to the
-    filesystem.
-
-``get_resource_stream(manager, resource_name)``
-    Return a readable file-like object for `resource_name`.
-
-``get_resource_string(manager, resource_name)``
-    Return a string containing the contents of `resource_name`.
-
-``has_resource(resource_name)``
-    Does the package contain the named resource?
-
-``resource_isdir(resource_name)``
-    Is the named resource a directory?  Return a false value if the resource
-    does not exist or is not a directory.
-
-``resource_listdir(resource_name)``
-    Return a list of the contents of the resource directory, ala
-    ``os.listdir()``.  Requesting the contents of a non-existent directory may
-    raise an exception.
-
-Note, by the way, that your provider classes need not (and should not) subclass
-``IResourceProvider`` or ``IMetadataProvider``!  These classes exist solely
-for documentation purposes and do not provide any useful implementation code.
-You may instead wish to subclass one of the `built-in resource providers`_.
-
-
-Built-in Resource Providers
----------------------------
-
-``pkg_resources`` includes several provider classes that are automatically used
-where appropriate.  Their inheritance tree looks like this::
-
-    NullProvider
-        EggProvider
-            DefaultProvider
-                PathMetadata
-            ZipProvider
-                EggMetadata
-        EmptyProvider
-            FileMetadata
-
-
-``NullProvider``
-    This provider class is just an abstract base that provides for common
-    provider behaviors (such as running scripts), given a definition for just
-    a few abstract methods.
-
-``EggProvider``
-    This provider class adds in some egg-specific features that are common
-    to zipped and unzipped eggs.
-
-``DefaultProvider``
-    This provider class is used for unpacked eggs and "plain old Python"
-    filesystem modules.
-
-``ZipProvider``
-    This provider class is used for all zipped modules, whether they are eggs
-    or not.
-
-``EmptyProvider``
-    This provider class always returns answers consistent with a provider that
-    has no metadata or resources.  ``Distribution`` objects created without
-    a ``metadata`` argument use an instance of this provider class instead.
-    Since all ``EmptyProvider`` instances are equivalent, there is no need
-    to have more than one instance.  ``pkg_resources`` therefore creates a
-    global instance of this class under the name ``empty_provider``, and you
-    may use it if you have need of an ``EmptyProvider`` instance.
-
-``PathMetadata(path, egg_info)``
-    Create an ``IResourceProvider`` for a filesystem-based distribution, where
-    `path` is the filesystem location of the importable modules, and `egg_info`
-    is the filesystem location of the distribution's metadata directory.
-    `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an
-    "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for
-    a "development egg".  However, other uses are possible for custom purposes.
-
-``EggMetadata(zipimporter)``
-    Create an ``IResourceProvider`` for a zipfile-based distribution.  The
-    `zipimporter` should be a ``zipimport.zipimporter`` instance, and may
-    represent a "basket" (a zipfile containing multiple ".egg" subdirectories)
-    a specific egg *within* a basket, or a zipfile egg (where the zipfile
-    itself is a ".egg").  It can also be a combination, such as a zipfile egg
-    that also contains other eggs.
-
-``FileMetadata(path_to_pkg_info)``
-    Create an ``IResourceProvider`` that provides exactly one metadata
-    resource: ``PKG-INFO``.  The supplied path should be a distutils PKG-INFO
-    file.  This is basically the same as an ``EmptyProvider``, except that
-    requests for ``PKG-INFO`` will be answered using the contents of the
-    designated file.  (This provider is used to wrap ``.egg-info`` files
-    installed by vendor-supplied system packages.)
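-
-As a hedged example of how ``PathMetadata`` is typically combined with
-``Distribution.from_location()`` for a "development egg" (the paths and
-project name are purely illustrative)::
-
-    import os
-    from pkg_resources import PathMetadata, Distribution
-
-    base = "/path/to/checkout/src"                       # importable modules
-    egg_info = os.path.join(base, "MyProject.egg-info")  # metadata directory
-    dist = Distribution.from_location(
-        base, "MyProject.egg-info", metadata=PathMetadata(base, egg_info)
-    )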
-
-
-Utility Functions
-=================
-
-In addition to its high-level APIs, ``pkg_resources`` also includes several
-generally-useful utility routines.  These routines are used to implement the
-high-level APIs, but can also be quite useful by themselves.
-
-
-Parsing Utilities
------------------
-
-``parse_version(version)``
-    Parse a project's version string, returning a value that can be used to
-    compare versions by chronological order.  Semantically, the format is a
-    rough cross between distutils' ``StrictVersion`` and ``LooseVersion``
-    classes; if you give it versions that would work with ``StrictVersion``,
-    then they will compare the same way.  Otherwise, comparisons are more like
-    a "smarter" form of ``LooseVersion``.  It is *possible* to create
-    pathological version coding schemes that will fool this parser, but they
-    should be very rare in practice.
-
-    The returned value will be a tuple of strings.  Numeric portions of the
-    version are padded to 8 digits so they will compare numerically, but
-    without relying on how numbers compare relative to strings.  Dots are
-    dropped, but dashes are retained.  Trailing zeros between alpha segments
-    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
-    "2.4". Alphanumeric parts are lower-cased.
-
-    The algorithm assumes that strings like "-" and any alpha string that
-    alphabetically follows "final"  represents a "patch level".  So, "2.4-1"
-    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
-    considered newer than "2.4-1", which in turn is newer than "2.4".
-
-    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
-    come before "final" alphabetically) are assumed to be pre-release versions,
-    so that the version "2.4" is considered newer than "2.4a1".  Any "-"
-    characters preceding a pre-release indicator are removed.  (In versions of
-    setuptools prior to 0.6a9, "-" characters were not removed, leading to the
-    unintuitive result that "0.2-rc1" was considered a newer version than
-    "0.2".)
-
-    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
-    "rc" are treated as if they were "c", i.e. as though they were release
-    candidates, and therefore are not as new as a version string that does not
-    contain them.  And the string "dev" is treated as if it were an "@" sign;
-    that is, a version coming before even "a" or "alpha".
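-
-    For example, the orderings described above can be checked directly (a
-    small illustrative snippet, not part of the documented API)::
-
-        from pkg_resources import parse_version
-
-        assert parse_version("2.4c1") < parse_version("2.4")      # pre-release
-        assert parse_version("2.4") < parse_version("2.4-1")      # patch level
-        assert parse_version("2.4-1") < parse_version("2.4.1")    # next release
-        assert parse_version("2.4.0") == parse_version("2.4")     # trailing zero
-        assert parse_version("0.2-rc1") == parse_version("0.2rc1")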
-
-.. _yield_lines():
-
-``yield_lines(strs)``
-    Yield non-empty/non-comment lines from a string/unicode or a possibly-
-    nested sequence thereof.  If `strs` is an instance of ``basestring``, it
-    is split into lines, and each non-blank, non-comment line is yielded after
-    stripping leading and trailing whitespace.  (Lines whose first non-blank
-    character is ``#`` are considered comment lines.)
-
-    If `strs` is not an instance of ``basestring``, it is iterated over, and
-    each item is passed recursively to ``yield_lines()``, so that an arbitrarily
-    nested sequence of strings, or sequences of sequences of strings can be
-    flattened out to the lines contained therein.  So for example, passing
-    a file object or a list of strings to ``yield_lines`` will both work.
-    (Note that between each string in a sequence of strings there is assumed to
-    be an implicit line break, so lines cannot bridge two strings in a
-    sequence.)
-
-    This routine is used extensively by ``pkg_resources`` to parse metadata
-    and file formats of various kinds, and most other ``pkg_resources``
-    parsing functions that yield multiple values will use it to break up their
-    input.  However, this routine is idempotent, so calling ``yield_lines()``
-    on the output of another call to ``yield_lines()`` is completely harmless.
-
-``split_sections(strs)``
-    Split a string (or possibly-nested iterable thereof), yielding ``(section,
-    content)`` pairs found using an ``.ini``-like syntax.  Each ``section`` is
-    a whitespace-stripped version of the section name ("``[section]``")
-    and each ``content`` is a list of stripped lines excluding blank lines and
-    comment-only lines.  If there are any non-blank, non-comment lines before
-    the first section header, they're yielded in a first ``section`` of
-    ``None``.
-
-    This routine uses ``yield_lines()`` as its front end, so you can pass in
-    anything that ``yield_lines()`` accepts, such as an open text file, string,
-    or sequence of strings.  ``ValueError`` is raised if a malformed section
-    header is found (i.e. a line starting with ``[`` but not ending with
-    ``]``).
-
-    Note that this simplistic parser assumes that any line whose first nonblank
-    character is ``[`` is a section heading, so it can't support .ini format
-    variations that allow ``[`` as the first nonblank character on other lines.
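-
-    For example (the section and entry names below are made up)::
-
-        from pkg_resources import split_sections
-
-        text = """
-        [console_scripts]
-        hello = hello.main:run
-
-        # a comment; ignored
-        [gui_scripts]
-        """
-        plugins = dict(split_sections(text))
-        # plugins == {'console_scripts': ['hello = hello.main:run'],
-        #             'gui_scripts': []}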
-
-``safe_name(name)``
-    Return a "safe" form of a project's name, suitable for use in a
-    ``Requirement`` string, as a distribution name, or a PyPI project name.
-    All non-alphanumeric runs are condensed to single "-" characters, such that
-    a name like "The $$$ Tree" becomes "The-Tree".  Note that if you are
-    generating a filename from this value you should combine it with a call to
-    ``to_filename()`` so all dashes ("-") are replaced by underscores ("_").
-    See ``to_filename()``.
-
-``safe_version(version)``
-    Similar to ``safe_name()`` except that spaces in the input become dots, and
-    dots are allowed to exist in the output.  As with ``safe_name()``, if you
-    are generating a filename from this you should replace any "-" characters
-    in the output with underscores.
-
-``safe_extra(extra)``
-    Return a "safe" form of an extra's name, suitable for use in a requirement
-    string or a setup script's ``extras_require`` keyword.  This routine is
-    similar to ``safe_name()`` except that non-alphanumeric runs are replaced
-    by a single underbar (``_``), and the result is lowercased.
-
-``to_filename(name_or_version)``
-    Escape a name or version string so it can be used in a dash-separated
-    filename (or ``#egg=name-version`` tag) without ambiguity.  You
-    should only pass in values that were returned by ``safe_name()`` or
-    ``safe_version()``.
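-
-    For example, combining ``safe_name()`` and ``to_filename()`` on the name
-    used above::
-
-        from pkg_resources import safe_name, to_filename
-
-        print(safe_name("The $$$ Tree"))               # The-Tree
-        print(to_filename(safe_name("The $$$ Tree")))  # The_Tree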
-
-
-Platform Utilities
-------------------
-
-``get_build_platform()``
-    Return this platform's identifier string.  For Windows, the return value
-    is ``"win32"``, and for Mac OS X it is a string of the form
-    ``"macosx-10.4-ppc"``.  All other platforms return the same uname-based
-    string that the ``distutils.util.get_platform()`` function returns.
-    This string is the minimum platform version required by distributions built
-    on the local machine.  (Backward compatibility note: setuptools versions
-    prior to 0.6b1 called this function ``get_platform()``, and the function is
-    still available under that name for backward compatibility reasons.)
-
-``get_supported_platform()`` (New in 0.6b1)
-    This is similar to ``get_build_platform()``, but is the maximum
-    platform version that the local machine supports.  You will usually want
-    to use this value as the ``required`` argument to the
-    ``compatible_platforms()`` function.
-
-``compatible_platforms(provided, required)``
-    Return true if a distribution built on the `provided` platform may be used
-    on the `required` platform.  If either platform value is ``None``, it is
-    considered a wildcard, and the platforms are therefore compatible.
-    Likewise, if the platform strings are equal, they're also considered
-    compatible, and ``True`` is returned.  Currently, the only non-equal
-    platform strings that are considered compatible are Mac OS X platform
-    strings with the same hardware type (e.g. ``ppc``) and major version
-    (e.g. ``10``) with the `provided` platform's minor version being less than
-    or equal to the `required` platform's minor version.
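-
-    For example, per the Mac OS X rule described above (a small illustrative
-    check, not part of the documented API)::
-
-        from pkg_resources import compatible_platforms
-
-        assert compatible_platforms("macosx-10.3-ppc", "macosx-10.4-ppc")
-        assert not compatible_platforms("macosx-10.4-ppc", "macosx-10.3-ppc")
-        assert compatible_platforms(None, "win32")   # None acts as a wildcard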
-
-``get_default_cache()``
-    Determine the default cache location for extracting resources from zipped
-    eggs.  This routine returns the ``PYTHON_EGG_CACHE`` environment variable,
-    if set.  Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of
-    the user's "Application Data" directory.  On all other systems, it returns
-    ``os.path.expanduser("~/.python-eggs")`` if ``PYTHON_EGG_CACHE`` is not
-    set.
-
-
-PEP 302 Utilities
------------------
-
-``get_importer(path_item)``
-    Retrieve a PEP 302 "importer" for the given path item (which need not
-    actually be on ``sys.path``).  This routine simulates the PEP 302 protocol
-    for obtaining an "importer" object.  It first checks for an importer for
-    the path item in ``sys.path_importer_cache``, and if not found it calls
-    each of the ``sys.path_hooks`` and caches the result if a good importer is
-    found.  If no importer is found, this routine returns an ``ImpWrapper``
-    instance that wraps the builtin import machinery as a PEP 302-compliant
-    "importer" object.  This ``ImpWrapper`` is *not* cached; instead a new
-    instance is returned each time.
-
-    (Note: When run under Python 2.5, this function is simply an alias for
-    ``pkgutil.get_importer()``, and instead of ``pkg_resources.ImpWrapper``
-    instances, it may return ``pkgutil.ImpImporter`` instances.)
-
-
-File/Path Utilities
--------------------
-
-``ensure_directory(path)``
-    Ensure that the parent directory (``os.path.dirname``) of `path` actually
-    exists, using ``os.makedirs()`` if necessary.
-
-``normalize_path(path)``
-    Return a "normalized" version of `path`, such that two paths represent
-    the same filesystem location if they have equal ``normalize_path()``
-    values.  Specifically, this is a shortcut for calling ``os.path.realpath``
-    and ``os.path.normcase`` on `path`.  Unfortunately, on certain platforms
-    (notably Cygwin and Mac OS X) the ``normcase`` function does not accurately
-    reflect the platform's case-sensitivity, so there is always the possibility
-    of two apparently-different paths being equal on such platforms.
-
-History
--------
-
-0.6c9
- * Fix ``resource_listdir('')`` always returning an empty list for zipped eggs.
-
-0.6c7
- * Fix package precedence problem where single-version eggs installed in
-   ``site-packages`` would take precedence over ``.egg`` files (or directories)
-   installed in ``site-packages``.
-
-0.6c6
- * Fix extracted C extensions not having executable permissions under Cygwin.
-
- * Allow ``.egg-link`` files to contain relative paths.
-
- * Fix cache dir defaults on Windows when multiple environment vars are needed
-   to construct a path.
-
-0.6c4
- * Fix "dev" versions being considered newer than release candidates.
-
-0.6c3
- * Python 2.5 compatibility fixes.
-
-0.6c2
- * Fix a problem with eggs specified directly on ``PYTHONPATH`` on
-   case-insensitive filesystems possibly not showing up in the default
-   working set, due to differing normalizations of ``sys.path`` entries.
-
-0.6b3
- * Fixed a duplicate path insertion problem on case-insensitive filesystems.
-
-0.6b1
- * Split ``get_platform()`` into ``get_supported_platform()`` and
-   ``get_build_platform()`` to work around a Mac versioning problem that caused
-   the behavior of ``compatible_platforms()`` to be platform specific.
-
- * Fix entry point parsing when a standalone module name has whitespace
-   between it and the extras.
-
-0.6a11
- * Added ``ExtractionError`` and ``ResourceManager.extraction_error()`` so that
-   cache permission problems get a more user-friendly explanation of the
-   problem, and so that programs can catch and handle extraction errors if they
-   need to.
-
-0.6a10
- * Added the ``extras`` attribute to ``Distribution``, the ``find_plugins()``
-   method to ``WorkingSet``, and the ``__add__()`` and ``__iadd__()`` methods
-   to ``Environment``.
-
- * ``safe_name()`` now allows dots in project names.
-
- * There is a new ``to_filename()`` function that escapes project names and
-   versions for safe use in constructing egg filenames from a Distribution
-   object's metadata.
-
- * Added ``Distribution.clone()`` method, and keyword argument support to other
-   ``Distribution`` constructors.
-
- * Added the ``DEVELOP_DIST`` precedence, and automatically assign it to
-   eggs using ``.egg-info`` format.
-
-0.6a9
- * Don't raise an error when an invalid (unfinished) distribution is found
-   unless absolutely necessary.  Warn about skipping invalid/unfinished eggs
-   when building an Environment.
-
- * Added support for ``.egg-info`` files or directories with version/platform
-   information embedded in the filename, so that system packagers have the
-   option of including ``PKG-INFO`` files to indicate the presence of a
-   system-installed egg, without needing to use ``.egg`` directories, zipfiles,
-   or ``.pth`` manipulation.
-
- * Changed ``parse_version()`` to remove dashes before pre-release tags, so
-   that ``0.2-rc1`` is considered an *older* version than ``0.2``, and is equal
-   to ``0.2rc1``.  The idea that a dash *always* meant a post-release version
-   was highly non-intuitive to setuptools users and Python developers, who
-   seem to want to use ``-rc`` version numbers a lot.
-
-0.6a8
- * Fixed a problem with ``WorkingSet.resolve()`` that prevented version
-   conflicts from being detected at runtime.
-
- * Improved runtime conflict warning message to identify a line in the user's
-   program, rather than flagging the ``warn()`` call in ``pkg_resources``.
-
- * Avoid giving runtime conflict warnings for namespace packages, even if they
-   were declared by a different package than the one currently being activated.
-
- * Fix path insertion algorithm for case-insensitive filesystems.
-
- * Fixed a problem with nested namespace packages (e.g. ``peak.util``) not
-   being set as an attribute of their parent package.
-
-0.6a6
- * Activated distributions are now inserted in ``sys.path`` (and the working
-   set) just before the directory that contains them, instead of at the end.
-   This allows e.g. eggs in ``site-packages`` to override unmanaged modules in
-   the same location, and allows eggs found earlier on ``sys.path`` to override
-   ones found later.
-
- * When a distribution is activated, it now checks whether any contained
-   non-namespace modules have already been imported and issues a warning if
-   a conflicting module has already been imported.
-
- * Changed dependency processing so that it's breadth-first, allowing a
-   depender's preferences to override those of a dependee, to prevent conflicts
-   when a lower version is acceptable to the dependee, but not the depender.
-
- * Fixed a problem extracting zipped files on Windows, when the egg in question
-   has had changed contents but still has the same version number.
-
-0.6a4
- * Fix a bug in ``WorkingSet.resolve()`` that was introduced in 0.6a3.
-
-0.6a3
- * Added ``safe_extra()`` parsing utility routine, and use it for Requirement,
-   EntryPoint, and Distribution objects' extras handling.
-
-0.6a1
- * Enhanced performance of ``require()`` and related operations when all
-   requirements are already in the working set, and enhanced performance of
-   directory scanning for distributions.
-
- * Fixed some problems using ``pkg_resources`` w/PEP 302 loaders other than
-   ``zipimport``, and the previously-broken "eager resource" support.
-
- * Fixed ``pkg_resources.resource_exists()`` not working correctly, along with
-   some other resource API bugs.
-
- * Many API changes and enhancements:
-
-   * Added ``EntryPoint``, ``get_entry_map``, ``load_entry_point``, and
-     ``get_entry_info`` APIs for dynamic plugin discovery.
-
-   * ``list_resources`` is now ``resource_listdir`` (and it actually works)
-
-   * Resource API functions like ``resource_string()`` that accepted a package
-     name and resource name, will now also accept a ``Requirement`` object in
-     place of the package name (to allow access to non-package data files in
-     an egg).
-
-   * ``get_provider()`` will now accept a ``Requirement`` instance or a module
-     name.  If it is given a ``Requirement``, it will return a corresponding
-     ``Distribution`` (by calling ``require()`` if a suitable distribution
-     isn't already in the working set), rather than returning a metadata and
-     resource provider for a specific module.  (The difference is in how
-     resource paths are interpreted; supplying a module name means resource
-     paths will be module-relative, rather than relative to the distribution's
-     root.)
-
-   * ``Distribution`` objects now implement the ``IResourceProvider`` and
-     ``IMetadataProvider`` interfaces, so you don't need to reference the (no
-     longer available) ``metadata`` attribute to get at these interfaces.
-
-   * ``Distribution`` and ``Requirement`` both have a ``project_name``
-     attribute for the project name they refer to.  (Previously these were
-     ``name`` and ``distname`` attributes.)
-
-   * The ``path`` attribute of ``Distribution`` objects is now ``location``,
-     because it isn't necessarily a filesystem path (and hasn't been for some
-     time now).  The ``location`` of ``Distribution`` objects in the filesystem
-     should always be normalized using ``pkg_resources.normalize_path()``; all
-     of the setuptools and EasyInstall code that generates distributions from
-     the filesystem (including ``Distribution.from_filename()``) ensure this
-     invariant, but if you use a more generic API like ``Distribution()`` or
-     ``Distribution.from_location()`` you should take care that you don't
-     create a distribution with an un-normalized filesystem path.
-
-   * ``Distribution`` objects now have an ``as_requirement()`` method that
-     returns a ``Requirement`` for the distribution's project name and version.
-
-   * Distribution objects no longer have an ``installed_on()`` method, and the
-     ``install_on()`` method is now ``activate()`` (but may go away altogether
-     soon).  The ``depends()`` method has also been renamed to ``requires()``,
-     and ``InvalidOption`` is now ``UnknownExtra``.
-
-   * ``find_distributions()`` now takes an additional argument called ``only``,
-     that tells it to only yield distributions whose location is the passed-in
-     path.  (It defaults to False, so that the default behavior is unchanged.)
-
-   * ``AvailableDistributions`` is now called ``Environment``, and the
-     ``get()``, ``__len__()``, and ``__contains__()`` methods were removed,
-     because they weren't particularly useful.  ``__getitem__()`` no longer
-     raises ``KeyError``; it just returns an empty list if there are no
-     distributions for the named project.
-
-   * The ``resolve()`` method of ``Environment`` is now a method of
-     ``WorkingSet`` instead, and the ``best_match()`` method now uses a working
-     set instead of a path list as its second argument.
-
-   * There is a new ``pkg_resources.add_activation_listener()`` API that lets
-     you register a callback for notifications about distributions added to
-     ``sys.path`` (including the distributions already on it).  This is
-     basically a hook for extensible applications and frameworks to be able to
-     search for plugin metadata in distributions added at runtime.
-
-0.5a13
- * Fixed a bug in resource extraction from nested packages in a zipped egg.
-
-0.5a12
- * Updated extraction/cache mechanism for zipped resources to avoid
-   inter-process and inter-thread races during extraction.  The default cache
-   location can now be set via the ``PYTHON_EGG_CACHE`` environment variable,
-   and the default Windows cache is now a ``Python-Eggs`` subdirectory of the
-   current user's "Application Data" directory, if the ``PYTHON_EGG_CACHE``
-   variable isn't set.
-
-0.5a10
- * Fix a problem with ``pkg_resources`` being confused by non-existent eggs on
-   ``sys.path`` (e.g. if a user deletes an egg without removing it from the
-   ``easy-install.pth`` file).
-
- * Fix a problem with "basket" support in ``pkg_resources``, where egg-finding
-   never actually went inside ``.egg`` files.
-
- * Made ``pkg_resources`` import the module you request resources from, if it's
-   not already imported.
-
-0.5a4
- * ``pkg_resources.AvailableDistributions.resolve()`` and related methods now
-   accept an ``installer`` argument: a callable taking one argument, a
-   ``Requirement`` instance.  The callable must return a ``Distribution``
-   object, or ``None`` if no distribution is found.  This feature is used by
-   EasyInstall to resolve dependencies by recursively invoking itself.
-
-0.4a4
- * Fix problems with ``resource_listdir()``, ``resource_isdir()`` and resource
-   directory extraction for zipped eggs.
-
-0.4a3
- * Fixed scripts not being able to see a ``__file__`` variable in ``__main__``
-
- * Fixed a problem with ``resource_isdir()`` implementation that was introduced
-   in 0.4a2.
-
-0.4a1
- * Fixed a bug in requirements processing for exact versions (i.e. ``==`` and
-   ``!=``) when only one condition was included.
-
- * Added ``safe_name()`` and ``safe_version()`` APIs to clean up handling of
-   arbitrary distribution names and versions found on PyPI.
-
-0.3a4
- * ``pkg_resources`` now supports resource directories, not just the resources
-   in them.  In particular, there are ``resource_listdir()`` and
-   ``resource_isdir()`` APIs.
-
- * ``pkg_resources`` now supports "egg baskets" -- .egg zipfiles which contain
-   multiple distributions in subdirectories whose names end with ``.egg``.
-   Having such a "basket" in a directory on ``sys.path`` is equivalent to
-   having the individual eggs in that directory, but the contained eggs can
-   be individually added (or not) to ``sys.path``.  Currently, however, there
-   is no automated way to create baskets.
-
- * Namespace package manipulation is now protected by the Python import lock.
-
-0.3a1
- * Initial release.
-
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/python3.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/python3.txt
deleted file mode 100644
index 2f6cde4ab35ec9ddfd3d551310cac2586091ab46..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/python3.txt
+++ /dev/null
@@ -1,121 +0,0 @@
-=====================================================
-Supporting both Python 2 and Python 3 with Distribute
-=====================================================
-
-Starting with version 0.6.2, Distribute supports Python 3. Installing and
-using distribute for Python 3 code works exactly the same as for Python 2
-code, but Distribute also helps you to support Python 2 and Python 3 from
-the same source code by letting you run 2to3 on the code as a part of the
-build process when you set the keyword parameter ``use_2to3`` to True.
-
-
-Distribute as help during porting
-=================================
-
-Distribute can make the porting process much easier by automatically running
-2to3 as a part of running the tests. To do this you need to configure the
-setup.py so that you can run the unit tests with ``python setup.py test``.
-
-See :ref:`test` for more information on this.
-
-Once you have the tests running under Python 2, you can add the ``use_2to3``
-keyword parameter to setup(), and start running the tests under Python 3.
-The test command will now first run the build command during which the code
-will be converted with 2to3, and the tests will then be run from the build
-directory, rather than from the source directory, as is normally done.
-
-Distribute will convert all Python files, and also all doctests in Python
-files. However, if you have doctests located in separate text files, these
-will not automatically be converted. If you add them to the
-``convert_2to3_doctests`` keyword parameter, Distribute will convert them as
-well.
-
-By default, the conversion uses all fixers in the ``lib2to3.fixers`` package.
-To use additional fixers, the parameter ``use_2to3_fixers`` can be set
-to a list of names of packages containing fixers. To exclude fixers, the
-parameter ``use_2to3_exclude_fixers`` can be set to fixer names to be
-skipped.
-
-A typical setup.py can look something like this::
-
-    from setuptools import setup
-
-    setup(
-        name='your.module',
-        version = '1.0',
-        description='This is your awesome module',
-        author='You',
-        author_email='your@email',
-        package_dir = {'': 'src'},
-        packages = ['your', 'your.module'],
-        test_suite = 'your.module.tests',
-        use_2to3 = True,
-        convert_2to3_doctests = ['src/your/module/README.txt'],
-        use_2to3_fixers = ['your.fixers'],
-        use_2to3_exclude_fixers = ['lib2to3.fixes.fix_import'],
-    )
-
-Differential conversion
------------------------
-
-Note that a file will only be copied and converted during the build process
-if the source file has been changed. If you add a file to the doctests
-that should be converted, it will not be converted the next time you run
-the tests, since it hasn't been modified. You need to remove it from the
-build directory. Also, if you run the build, install or test commands before
-adding the ``use_2to3`` parameter, you will have to remove the build directory
-before you run the test command, as the files will otherwise appear to be up
-to date, and no conversion will happen.
-
-In general, if code doesn't seem to be converted, deleting the build directory
-and trying again is a good safeguard against the build directory getting
-"out of sync" with the source directory.
-
-Distributing Python 3 modules
-=============================
-
-You can distribute your modules with Python 3 support in different ways. A
-normal source distribution will work, but can be slow to install, as the
-2to3 process will be run during the install. But you can also distribute
-the module in binary format, such as a binary egg. That egg will contain the
-already converted code, and hence no 2to3 conversion is needed during install.
-
-Advanced features
-=================
-
-If you don't want to run the 2to3 conversion on the doctests in Python files,
-you can turn that off by setting ``setuptools.use_2to3_on_doctests = False``.
-
-Note on compatibility with setuptools
-=====================================
-
-Setuptools does not know about the new keyword parameters that support Python 3.
-As a result, it will warn about the unknown keyword parameters if you use
-setuptools instead of Distribute under Python 2. This is not an error, and the
-install process will continue as normal, but if you want to get rid of that
-warning this is easy. Simply add the new parameters conditionally to an extra
-dict and pass that dict into setup()::
-
-    from setuptools import setup
-    import sys
-
-    extra = {}
-    if sys.version_info >= (3,):
-        extra['use_2to3'] = True
-        extra['convert_2to3_doctests'] = ['src/your/module/README.txt']
-        extra['use_2to3_fixers'] = ['your.fixers']
-
-    setup(
-        name='your.module',
-        version = '1.0',
-        description='This is your awesome module',
-        author='You',
-        author_email='your@email',
-        package_dir = {'': 'src'},
-        packages = ['your', 'your.module'],
-        test_suite = 'your.module.tests',
-        **extra
-    )
-
-This way the parameters will only be used under Python 3, where you have to
-use Distribute.
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/roadmap.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/roadmap.txt
deleted file mode 100644
index ea5070eaaf8797a273928df2b3791ac669920cdb..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/roadmap.txt
+++ /dev/null
@@ -1,86 +0,0 @@
-=======
-Roadmap
-=======
-
-Distribute has two branches:
-
-- 0.6.x: provides a Setuptools-0.6cX compatible version
-- 0.7.x: will provide a refactoring
-
-0.6.x
-=====
-
-Not "much" is going to happen here, we want this branch to be helpful
-to the community *today* by addressing the 40-or-so bugs
-that were found in Setuptools and never fixed. This is eventually
-happen soon because its development is
-fast : there are up to 5 commiters that are working on it very often
-(and the number grows weekly.)
-
-The biggest issue with this branch is that it is providing the same
-packages and modules setuptools does, and this
-requires some bootstrapping work where we make sure that, once Distribute is
-installed, all distributions that require Setuptools
-will continue to work. This is done by faking the metadata of
-Setuptools 0.6c9. That's the only way we found to do this.
-
-There's one major thing though: thanks to the work of Lennart, Alex,
-and Martin, this branch supports Python 3,
-which is great to have, as it should help speed up Py3 adoption.
-
-The goal of the 0.6.x branch is to remove as many bugs as we can, and,
-if possible, to remove the patches applied
-to Distutils. We will maintain 0.6.x for years and we will
-promote its usage everywhere instead of
-Setuptools.
-
-Some new commands are added there when they are helpful and don't
-interact with the rest. I am thinking
-of "upload_docs", which lets you upload documentation to PyPI. The
-goal is to move it to Distutils
-at some point, if the documentation feature of PyPI stays and starts to be used.
-
-0.7.x
-=====
-
-We've started to refactor Distribute with this roadmap in mind (and
-no, as someone said, it's not vaporware,
-we've done a lot already).
-
-- 0.7.x can be installed and used with 0.6.x
-
-- easy_install is going to be deprecated! Use Pip!
-
-- the version system will be deprecated, in favor of the one in Distutils
-
-- no more Distutils monkey-patch that happens once you use the code
-  (things like 'from distutils import cmd; cmd.Command = CustomCommand')
-
-- no more custom site.py (that is: if something misses in Python's
-  site.py we'll add it there instead of patching it)
-
-- no more namespace package system, if PEP 382 (namespace package
-  support) makes it into 2.7
-
-- The code is split into many packages and might be distributed as
-  several distributions.
-
- - distribute.resources: that's the old pkg_resources, but
-   reorganized into clean, PEP 8 modules. This package will
-   only contain the query APIs and will focus on being PEP 376
-   compatible. We will promote its usage and see if Pip wants
-   to use it as a basis.
-   It will probably shrink a lot though, once the stdlib provides PEP 376 support.
-
- - distribute.entrypoints: that's the old pkg_resources entry points
-   system, but on its own. It uses distribute.resources.
-
- - distribute.index: that's package_index and a few other things:
-   everything required to interact with PyPI. We will promote
-   its usage and see if Pip wants to use it as a basis.
-
- - distribute.core (might be renamed to main): that's everything
-   else, and uses the other packages.
-
-Goal: A first release before (or when) Python 2.7 / 3.2 is out.
-
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/setuptools.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/setuptools.txt
deleted file mode 100644
index 31ecc931f259d3d4d13ceed1663b8e6aa088b0ca..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/setuptools.txt
+++ /dev/null
@@ -1,3236 +0,0 @@
-==================================================
-Building and Distributing Packages with Distribute
-==================================================
-
-``Distribute`` is a collection of enhancements to the Python ``distutils``
-(for Python 2.3.5 and up on most platforms; 64-bit platforms require a minimum
-of Python 2.4) that allow you to more easily build and distribute Python
-packages, especially ones that have dependencies on other packages.
-
-Packages built and distributed using ``setuptools`` look to the user like
-ordinary Python packages based on the ``distutils``.  Your users don't need to
-install or even know about setuptools in order to use them, and you don't
-have to include the entire setuptools package in your distributions.  By
-including just a single `bootstrap module`_ (an 8K .py file), your package will
-automatically download and install ``setuptools`` if the user is building your
-package from source and doesn't have a suitable version already installed.
-
-.. _bootstrap module: http://nightly.ziade.org/distribute_setup.py
-
-Feature Highlights:
-
-* Automatically find/download/install/upgrade dependencies at build time using
-  the `EasyInstall tool <http://peak.telecommunity.com/DevCenter/EasyInstall>`_,
-  which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
-  automatically scans web pages linked from PyPI to find download links.  (It's
-  the closest thing to CPAN currently available for Python.)
-
-* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
-  a single-file importable distribution format
-
-* Include data files inside your package directories, where your code can
-  actually use them.  (Python 2.4 distutils also supports this feature, but
-  setuptools provides the feature for Python 2.3 packages also, and supports
-  accessing data files in zipped packages too.)
-
-* Automatically include all packages in your source tree, without listing them
-  individually in setup.py
-
-* Automatically include all relevant files in your source distributions,
-  without needing to create a ``MANIFEST.in`` file, and without having to force
-  regeneration of the ``MANIFEST`` file when your source tree changes.
-
-* Automatically generate wrapper scripts or Windows (console and GUI) .exe
-  files for any number of "main" functions in your project.  (Note: this is not
-  a py2exe replacement; the .exe files rely on the local Python installation.)
-
-* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
-  still work even when the end-user doesn't have Pyrex installed (as long as
-  you include the Pyrex-generated C in your source distribution)
-
-* Command aliases - create project-specific, per-user, or site-wide shortcut
-  names for commonly used commands and options
-
-* PyPI upload support - upload your source distributions and eggs to PyPI
-
-* Deploy your project in "development mode", such that it's available on
-  ``sys.path``, yet can still be edited directly from its source checkout.
-
-* Easily extend the distutils with new commands or ``setup()`` arguments, and
-  distribute/reuse your extensions for multiple projects, without copying code.
-
-* Create extensible applications and frameworks that automatically discover
-  extensions, using simple "entry points" declared in a project's setup script.
-
-In addition to the PyPI downloads, the development version of ``setuptools``
-is available from the `Python SVN sandbox`_, and in-development versions of the
-`0.6 branch`_ are available as well.
-
-.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
-
-.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
-
-.. contents:: **Table of Contents**
-
-.. _distribute_setup.py: `bootstrap module`_
-
-
------------------
-Developer's Guide
------------------
-
-
-Installing ``setuptools``
-=========================
-
-Please follow the `EasyInstall Installation Instructions`_ to install the
-current stable version of setuptools.  In particular, be sure to read the
-section on `Custom Installation Locations`_ if you are installing anywhere
-other than Python's ``site-packages`` directory.
-
-.. _EasyInstall Installation Instructions: http://peak.telecommunity.com/DevCenter/EasyInstall#installation-instructions
-
-.. _Custom Installation Locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
-
-If you want the current in-development version of setuptools, you should first
-install a stable version, and then run::
-
-    distribute_setup.py setuptools==dev
-
-This will download and install the latest development (i.e. unstable) version
-of setuptools from the Python Subversion sandbox.
-
-
-Basic Use
-=========
-
-For basic use of setuptools, just import things from setuptools instead of
-the distutils.  Here's a minimal setup script using setuptools::
-
-    from setuptools import setup, find_packages
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages(),
-    )
-
-As you can see, it doesn't take much to use setuptools in a project.
-Just by doing the above, this project will be able to produce eggs, upload to
-PyPI, and automatically include all packages in the directory where the
-setup.py lives.  See the `Command Reference`_ section below to see what
-commands you can give to this setup script.
-
-Of course, before you release your project to PyPI, you'll want to add a bit
-more information to your setup script to help people find or learn about your
-project.  And maybe your project will have grown by then to include a few
-dependencies, and perhaps some data files and scripts::
-
-    from setuptools import setup, find_packages
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages(),
-        scripts = ['say_hello.py'],
-
-        # Project uses reStructuredText, so ensure that the docutils get
-        # installed or upgraded on the target machine
-        install_requires = ['docutils>=0.3'],
-
-        package_data = {
-            # If any package contains *.txt or *.rst files, include them:
-            '': ['*.txt', '*.rst'],
-            # And include any *.msg files found in the 'hello' package, too:
-            'hello': ['*.msg'],
-        },
-
-        # metadata for upload to PyPI
-        author = "Me",
-        author_email = "me@example.com",
-        description = "This is an Example Package",
-        license = "PSF",
-        keywords = "hello world example examples",
-        url = "http://example.com/HelloWorld/",   # project home page, if any
-
-        # could also include long_description, download_url, classifiers, etc.
-    )
-
-In the sections that follow, we'll explain what most of these ``setup()``
-arguments do (except for the metadata ones), and the various ways you might use
-them in your own project(s).
-
-
-Specifying Your Project's Version
----------------------------------
-
-Setuptools can work well with most versioning schemes; there are, however, a
-few special things to watch out for, in order to ensure that setuptools and
-EasyInstall can always tell what version of your package is newer than another
-version.  Knowing these things will also help you correctly specify what
-versions of other projects your project depends on.
-
-A version consists of an alternating series of release numbers and pre-release
-or post-release tags.  A release number is a series of digits punctuated by
-dots, such as ``2.4`` or ``0.5``.  Each series of digits is treated
-numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the
-same release number, denoting the first subrelease of release 2.  But  ``2.10``
-is the *tenth* subrelease of release 2, and so is a different and newer release
-from ``2.1`` or ``2.1.0``.  Leading zeros within a series of digits are also
-ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``.
-
-Following a release number, you can have either a pre-release or post-release
-tag.  Pre-release tags make a version be considered *older* than the version
-they are appended to.  So, revision ``2.4`` is *newer* than revision ``2.4c1``,
-which in turn is newer than ``2.4b1`` or ``2.4a1``.  Postrelease tags make
-a version be considered *newer* than the version they are appended to.  So,
-revisions like ``2.4-1`` and ``2.4pl3`` are newer than ``2.4``, but are *older*
-than ``2.4.1`` (which has a higher release number).
-
-A pre-release tag is a series of letters that are alphabetically before
-"final".  Some examples of prerelease tags would include ``alpha``, ``beta``,
-``a``, ``c``, ``dev``, and so on.  You do not have to place a dot before
-the prerelease tag if it's immediately after a number, but it's okay to do
-so if you prefer.  Thus, ``2.4c1`` and ``2.4.c1`` both represent release
-candidate 1 of version ``2.4``, and are treated as identical by setuptools.
-
-In addition, there are three special prerelease tags that are treated as if
-they were the letter ``c``: ``pre``, ``preview``, and ``rc``.  So, version
-``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as
-``2.4c1``, and are treated as identical by setuptools.
-
-A post-release tag is either a series of letters that are alphabetically
-greater than or equal to "final", or a dash (``-``).  Post-release tags are
-generally used to separate patch numbers, port numbers, build numbers, revision
-numbers, or date stamps from the release number.  For example, the version
-``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of
-version ``2.4``.  Or you might use ``2.4-20051127`` to denote a date-stamped
-post-release.
-
-Notice that after each pre or post-release tag, you are free to place another
-release number, followed again by more pre- or post-release tags.  For example,
-``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the
-in-development version of the ninth alpha of release 0.6.  Notice that ``dev`` is
-a pre-release tag, so this version is a *lower* version number than ``0.6a9``,
-which would be the actual ninth alpha of release 0.6.  But the ``-r41475`` is
-a post-release tag, so this version is *newer* than ``0.6a9.dev``.
-
-For the most part, setuptools' interpretation of version numbers is intuitive,
-but here are a few tips that will keep you out of trouble in the corner cases:
-
-* Don't use ``-`` or any other character than ``.`` as a separator, unless you
-  really want a post-release.  Remember that ``2.1-rc2`` means you've
-  *already* released ``2.1``, whereas ``2.1rc2`` and ``2.1.c2`` are candidates
-  you're putting out *before* ``2.1``.  If you accidentally distribute copies
-  of a post-release that you meant to be a pre-release, the only safe fix is to
-  bump your main release number (e.g. to ``2.1.1``) and re-release the project.
-
-* Don't stick adjoining pre-release tags together without a dot or number
-  between them.  Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``,
-  *not* a development pre-release of ``1.9a``.  Use ``.dev`` instead, as in
-  ``1.9a.dev``, or separate the prerelease tags with a number, as in
-  ``1.9a0dev``.  ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are
-  identical versions from setuptools' point of view, so you can use whatever
-  scheme you prefer.
-
-* If you want to be certain that your chosen numbering scheme works the way
-  you think it will, you can use the ``pkg_resources.parse_version()`` function
-  to compare different version numbers::
-
-    >>> from pkg_resources import parse_version
-    >>> parse_version('1.9.a.dev') == parse_version('1.9a0dev')
-    True
-    >>> parse_version('2.1-rc2') < parse_version('2.1')
-    False
-    >>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9')
-    True
-
-Once you've decided on a version numbering scheme for your project, you can
-have setuptools automatically tag your in-development releases with various
-pre- or post-release tags.  See the following sections for more details:
-
-* `Tagging and "Daily Build" or "Snapshot" Releases`_
-* `Managing "Continuous Releases" Using Subversion`_
-* The `egg_info`_ command
-
-
-New and Changed ``setup()`` Keywords
-====================================
-
-The following keyword arguments to ``setup()`` are added or changed by
-``setuptools``.  All of them are optional; you do not have to supply them
-unless you need the associated ``setuptools`` feature.
-
-``include_package_data``
-    If set to ``True``, this tells ``setuptools`` to automatically include any
-    data files it finds inside your package directories, that are either under
-    CVS or Subversion control, or which are specified by your ``MANIFEST.in``
-    file.  For more information, see the section below on `Including Data
-    Files`_.
-
-``exclude_package_data``
-    A dictionary mapping package names to lists of glob patterns that should
-    be *excluded* from your package directories.  You can use this to trim back
-    any excess files included by ``include_package_data``.  For a complete
-    description and examples, see the section below on `Including Data Files`_.
-
-``package_data``
-    A dictionary mapping package names to lists of glob patterns.  For a
-    complete description and examples, see the section below on `Including
-    Data Files`_.  You do not need to use this option if you are using
-    ``include_package_data``, unless you need to add e.g. files that are
-    generated by your setup script and build process.  (And are therefore not
-    in source control or are files that you don't want to include in your
-    source distribution.)
-
-``zip_safe``
-    A boolean (True or False) flag specifying whether the project can be
-    safely installed and run from a zip file.  If this argument is not
-    supplied, the ``bdist_egg`` command will have to analyze all of your
-    project's contents for possible problems each time it builds an egg.
-
-``install_requires``
-    A string or list of strings specifying what other distributions need to
-    be installed when this one is.  See the section below on `Declaring
-    Dependencies`_ for details and examples of the format of this argument.
-
-``entry_points``
-    A dictionary mapping entry point group names to strings or lists of strings
-    defining the entry points.  Entry points are used to support dynamic
-    discovery of services or plugins provided by a project.  See `Dynamic
-    Discovery of Services and Plugins`_ for details and examples of the format
-    of this argument.  In addition, this keyword is used to support `Automatic
-    Script Creation`_.
-
-``extras_require``
-    A dictionary mapping names of "extras" (optional features of your project)
-    to strings or lists of strings specifying what other distributions must be
-    installed to support those features.  See the section below on `Declaring
-    Dependencies`_ for details and examples of the format of this argument.
-
-``setup_requires``
-    A string or list of strings specifying what other distributions need to
-    be present in order for the *setup script* to run.  ``setuptools`` will
-    attempt to obtain these (even going so far as to download them using
-    ``EasyInstall``) before processing the rest of the setup script or commands.
-    This argument is needed if you are using distutils extensions as part of
-    your build process; for example, extensions that process setup() arguments
-    and turn them into EGG-INFO metadata files.
-
-    (Note: projects listed in ``setup_requires`` will NOT be automatically
-    installed on the system where the setup script is being run.  They are
-    simply downloaded to the setup directory if they're not locally available
-    already.  If you want them to be installed, as well as being available
-    when the setup script is run, you should add them to ``install_requires``
-    **and** ``setup_requires``.)
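-
-    For illustration, here is a minimal sketch; ``some_build_plugin`` is a
-    hypothetical distutils extension, not a real project::
-
-        from setuptools import setup, find_packages
-
-        setup(
-            name = "HelloWorld",
-            version = "0.1",
-            packages = find_packages(),
-            # fetched before the setup commands run, but not installed on
-            # the target system (see the note above)
-            setup_requires = ["some_build_plugin>=1.0"],
-        )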
-
-``dependency_links``
-    A list of strings naming URLs to be searched when satisfying dependencies.
-    These links will be used if needed to install packages specified by
-    ``setup_requires`` or ``tests_require``.  They will also be written into
-    the egg's metadata, for use by tools like EasyInstall when installing
-    an ``.egg`` file.
-
-``namespace_packages``
-    A list of strings naming the project's "namespace packages".  A namespace
-    package is a package that may be split across multiple project
-    distributions.  For example, Zope 3's ``zope`` package is a namespace
-    package, because subpackages like ``zope.interface`` and ``zope.publisher``
-    may be distributed separately.  The egg runtime system can automatically
-    merge such subpackages into a single parent package at runtime, as long
-    as you declare them in each project that contains any subpackages of the
-    namespace package, and as long as the namespace package's ``__init__.py``
-    does not contain any code other than a namespace declaration.  See the
-    section below on `Namespace Packages`_ for more information.
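-
-    As a sketch (the ``zope.example`` project name is made up), a distribution
-    containing one piece of the ``zope`` namespace package might declare::
-
-        from setuptools import setup, find_packages
-
-        setup(
-            name = "zope.example",            # hypothetical project
-            version = "0.1",
-            packages = find_packages(),
-            namespace_packages = ['zope'],
-        )
-
-    and its ``zope/__init__.py`` would contain nothing but a namespace
-    declaration, commonly written as::
-
-        __import__('pkg_resources').declare_namespace(__name__)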
-
-``test_suite``
-    A string naming a ``unittest.TestCase`` subclass (or a package or module
-    containing one or more of them, or a method of such a subclass), or naming
-    a function that can be called with no arguments and returns a
-    ``unittest.TestSuite``.  If the named suite is a module, and the module
-    has an ``additional_tests()`` function, it is called and the results are
-    added to the tests to be run.  If the named suite is a package, any
-    submodules and subpackages are recursively added to the overall test suite.
-
-    Specifying this argument enables use of the `test`_ command to run the
-    specified test suite, e.g. via ``setup.py test``.  See the section on the
-    `test`_ command below for more details.
-
-``tests_require``
-    If your project's tests need one or more additional packages besides those
-    needed to install it, you can use this option to specify them.  It should
-    be a string or list of strings specifying what other distributions need to
-    be present for the package's tests to run.  When you run the ``test``
-    command, ``setuptools`` will  attempt to obtain these (even going
-    so far as to download them using ``EasyInstall``).  Note that these
-    required projects will *not* be installed on the system where the tests
-    are run, but only downloaded to the project's setup directory if they're
-    not already installed locally.
-
-.. _test_loader:
-
-``test_loader``
-    If you would like to use a different way of finding tests to run than what
-    setuptools normally uses, you can specify a module name and class name in
-    this argument.  The named class must be instantiable with no arguments, and
-    its instances must support the ``loadTestsFromNames()`` method as defined
-    in the Python ``unittest`` module's ``TestLoader`` class.  Setuptools will
-    pass only one test "name" in the ``names`` argument: the value supplied for
-    the ``test_suite`` argument.  The loader you specify may interpret this
-    string in any way it likes, as there are no restrictions on what may be
-    contained in a ``test_suite`` string.
-
-    The module name and class name must be separated by a ``:``.  The default
-    value of this argument is ``"setuptools.command.test:ScanningLoader"``.  If
-    you want to use the default ``unittest`` behavior, you can specify
-    ``"unittest:TestLoader"`` as your ``test_loader`` argument instead.  This
-    will prevent automatic scanning of submodules and subpackages.
-
-    The module and class you specify here may be contained in another package,
-    as long as you use the ``tests_require`` option to ensure that the package
-    containing the loader class is available when the ``test`` command is run.
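-
-    Tying the test-related options together, a minimal sketch (the test
-    package and the extra test dependency are hypothetical) might look like::
-
-        from setuptools import setup, find_packages
-
-        setup(
-            name = "HelloWorld",
-            version = "0.1",
-            packages = find_packages(),
-            # run with "setup.py test"
-            test_suite = "helloworld.tests",
-            # fetched only when the test command runs
-            tests_require = ["mock"],
-            # use plain unittest behavior instead of the scanning loader
-            test_loader = "unittest:TestLoader",
-        )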
-
-``eager_resources``
-    A list of strings naming resources that should be extracted together, if
-    any of them is needed, or if any C extensions included in the project are
-    imported.  This argument is only useful if the project will be installed as
-    a zipfile, and there is a need to have all of the listed resources be
-    extracted to the filesystem *as a unit*.  Resources listed here
-    should be '/'-separated paths, relative to the source root, so to list a
-    resource ``foo.png`` in package ``bar.baz``, you would include the string
-    ``bar/baz/foo.png`` in this argument.
-
-    If you only need to obtain resources one at a time, or you don't have any C
-    extensions that access other files in the project (such as data files or
-    shared libraries), you probably do NOT need this argument and shouldn't
-    mess with it.  For more details on how this argument works, see the section
-    below on `Automatic Resource Extraction`_.
-
-``use_2to3``
-    Convert the source code from Python 2 to Python 3 with 2to3 during the
-    build process. See :doc:`python3` for more details.
-
-``convert_2to3_doctests``
-    List of doctest source files that need to be converted with 2to3.
-    See :doc:`python3` for more details.
-
-``use_2to3_fixers``
-    A list of modules to search for additional fixers to be used during
-    the 2to3 conversion. See :doc:`python3` for more details.
-
-
-Using ``find_packages()``
--------------------------
-
-For simple projects, it's usually easy enough to manually add packages to
-the ``packages`` argument of ``setup()``.  However, for very large projects
-(Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the
-package list updated.  That's what ``setuptools.find_packages()`` is for.
-
-``find_packages()`` takes a source directory, and a list of package names or
-patterns to exclude.  If omitted, the source directory defaults to the same
-directory as the setup script.  Some projects use a ``src`` or ``lib``
-directory as the root of their source tree, and those projects would of course
-use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``.  (And
-such projects also need something like ``package_dir = {'':'src'}`` in their
-``setup()`` arguments, but that's just a normal distutils thing.)
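-
-For example, a project keeping its packages under ``src`` might use something
-like the following sketch::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages('src'),   # look for packages under src/
-        package_dir = {'': 'src'},         # map the root package to src/
-    )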
-
-Anyway, ``find_packages()`` walks the target directory, and finds Python
-packages by looking for ``__init__.py`` files.  It then filters the list of
-packages using the exclusion patterns.
-
-Exclusion patterns are package names, optionally including wildcards.  For
-example, ``find_packages(exclude=["*.tests"])`` will exclude all packages whose
-last name part is ``tests``.   Or, ``find_packages(exclude=["*.tests",
-"*.tests.*"])`` will also exclude any subpackages of packages named ``tests``,
-but it still won't exclude a top-level ``tests`` package or the children
-thereof.  In fact, if you really want no ``tests`` packages at all, you'll need
-something like this::
-
-    find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
-
-in order to cover all the bases.  Really, the exclusion patterns are intended
-to cover simpler use cases than this, like excluding a single, specified
-package and its subpackages.
-
-Regardless of the target directory or exclusions, the ``find_packages()``
-function returns a list of package names suitable for use as the ``packages``
-argument to ``setup()``, and so is usually the easiest way to set that
-argument in your setup script.  Especially since it frees you from having to
-remember to modify your setup script whenever your project grows additional
-top-level packages or subpackages.
-
-
-Automatic Script Creation
-=========================
-
-Packaging and installing scripts can be a bit awkward with the distutils.  For
-one thing, there's no easy way to have a script's filename match local
-conventions on both Windows and POSIX platforms.  For another, you often have
-to create a separate file just for the "main" script, when your actual "main"
-is a function in a module somewhere.  And even in Python 2.4, using the ``-m``
-option only works for actual ``.py`` files that aren't installed in a package.
-
-``setuptools`` fixes all of these problems by automatically generating scripts
-for you with the correct extension, and on Windows it will even create an
-``.exe`` file so that users don't have to change their ``PATHEXT`` settings.
-The way to use this feature is to define "entry points" in your setup script
-that indicate what function the generated script should import and run.  For
-example, to create two console scripts called ``foo`` and ``bar``, and a GUI
-script called ``baz``, you might do something like this::
-
-    setup(
-        # other arguments here...
-        entry_points = {
-            'console_scripts': [
-                'foo = my_package.some_module:main_func',
-                'bar = other_module:some_func',
-            ],
-            'gui_scripts': [
-                'baz = my_package_gui:start_func',
-            ]
-        }
-    )
-
-When this project is installed on non-Windows platforms (using "setup.py
-install", "setup.py develop", or by using EasyInstall), a set of ``foo``,
-``bar``, and ``baz`` scripts will be installed that import ``main_func`` and
-``some_func`` from the specified modules.  The functions you specify are called
-with no arguments, and their return value is passed to ``sys.exit()``, so you
-can return an errorlevel or message to print to stderr.
-
-On Windows, a set of ``foo.exe``, ``bar.exe``, and ``baz.exe`` launchers are
-created, alongside a set of ``foo.py``, ``bar.py``, and ``baz.pyw`` files.  The
-``.exe`` wrappers find and execute the right version of Python to run the
-``.py`` or ``.pyw`` file.
-
-You may define as many "console script" and "gui script" entry points as you
-like, and each one can optionally specify "extras" that it depends on, that
-will be added to ``sys.path`` when the script is run.  For more information on
-"extras", see the section below on `Declaring Extras`_.  For more information
-on "entry points" in general, see the section below on `Dynamic Discovery of
-Services and Plugins`_.
-
-
-"Eggsecutable" Scripts
-----------------------
-
-Occasionally, there are situations where it's desirable to make an ``.egg``
-file directly executable.  You can do this by including an entry point such
-as the following::
-
-    setup(
-        # other arguments here...
-        entry_points = {
-            'setuptools.installation': [
-                'eggsecutable = my_package.some_module:main_func',
-            ]
-        }
-    )
-
-Any eggs built from the above setup script will include a short executable
-prelude that imports and calls ``main_func()`` from ``my_package.some_module``.
-The prelude can be run on Unix-like platforms (including Mac and Linux) by
-invoking the egg with ``/bin/sh``, or by enabling execute permissions on the
-``.egg`` file.  For the executable prelude to run, the appropriate version of
-Python must be available via the ``PATH`` environment variable, under its
-"long" name.  That is, if the egg is built for Python 2.3, there must be a
-``python2.3`` executable present in a directory on ``PATH``.
-
-This feature is primarily intended to support the installation of setuptools
-itself (via ``distribute_setup``) on non-Windows platforms, but may also be
-useful for other projects.
-
-IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or
-invoked via symlinks.  They *must* be invoked using their original filename, in
-order to ensure that, once running, ``pkg_resources`` will know what project
-and version is in use.  The header script will check this and exit with an
-error if the ``.egg`` file has been renamed or is invoked via a symlink that
-changes its base name.
-
-
-Declaring Dependencies
-======================
-
-``setuptools`` supports automatically installing dependencies when a package is
-installed, and including information about dependencies in Python Eggs (so that
-package management tools like EasyInstall can use the information).
-
-``setuptools`` and ``pkg_resources`` use a common syntax for specifying a
-project's required dependencies.  This syntax consists of a project's PyPI
-name, optionally followed by a comma-separated list of "extras" in square
-brackets, optionally followed by a comma-separated list of version
-specifiers.  A version specifier is one of the operators ``<``, ``>``, ``<=``,
-``>=``, ``==`` or ``!=``, followed by a version identifier.  Tokens may be
-separated by whitespace, but any whitespace or nonstandard characters within a
-project name or version identifier must be replaced with ``-``.
-
-Version specifiers for a given project are internally sorted into ascending
-version order, and used to establish what ranges of versions are acceptable.
-Adjacent redundant conditions are also consolidated (e.g. ``">1, >2"`` becomes
-``">1"``, and ``"<2,<3"`` becomes ``"<3"``). ``"!="`` versions are excised from
-the ranges they fall within.  A project's version is then checked for
-membership in the resulting ranges. (Note that providing conflicting conditions
-for the same version (e.g. "<2,>=2" or "==2,!=2") is meaningless and may
-therefore produce bizarre results.)
-
-Here are some example requirement specifiers::
-
-    docutils >= 0.3
-
-    # comment lines and \ continuations are allowed in requirement strings
-    BazSpam ==1.1, ==1.2, ==1.3, ==1.4, ==1.5, \
-        ==1.6, ==1.7  # and so are line-end comments
-
-    PEAK[FastCGI, reST]>=0.5a4
-
-    setuptools==0.5a7
-
-The simplest way to include requirement specifiers is to use the
-``install_requires`` argument to ``setup()``.  It takes a string or list of
-strings containing requirement specifiers.  If you include more than one
-requirement in a string, each requirement must begin on a new line.
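-
-For example, a minimal sketch passing a couple of the example specifiers shown
-above as a list::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages(),
-        install_requires = ["docutils>=0.3", "PEAK[FastCGI, reST]>=0.5a4"],
-    )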
-
-This has three effects:
-
-1. When your project is installed, either by using EasyInstall, ``setup.py
-   install``, or ``setup.py develop``, all of the dependencies not already
-   installed will be located (via PyPI), downloaded, built (if necessary),
-   and installed.
-
-2. Any scripts in your project will be installed with wrappers that verify
-   the availability of the specified dependencies at runtime, and ensure that
-   the correct versions are added to ``sys.path`` (e.g. if multiple versions
-   have been installed).
-
-3. Python Egg distributions will include a metadata file listing the
-   dependencies.
-
-Note, by the way, that if you declare your dependencies in ``setup.py``, you do
-*not* need to use the ``require()`` function in your scripts or modules, as
-long as you either install the project or use ``setup.py develop`` to do
-development work on it.  (See `"Development Mode"`_ below for more details on
-using ``setup.py develop``.)
-
-
-Dependencies that aren't in PyPI
---------------------------------
-
-If your project depends on packages that aren't registered in PyPI, you may
-still be able to depend on them, as long as they are available for download
-as:
-
-- an egg,
-- a source distribution in the standard distutils ``sdist`` format,
-- a single ``.py`` file, or
-- a VCS repository (Subversion, Mercurial, or Git).
-
-You just need to add some URLs to the ``dependency_links`` argument to
-``setup()``.
-
-The URLs must be either:
-
-1. direct download URLs,
-2. the URLs of web pages that contain direct download links, or
-3. the repository's URL
-
-In general, it's better to link to web pages, because it is usually less
-complex to update a web page than to release a new version of your project.
-You can also use a SourceForge ``showfiles.php`` link in the case where a
-package you depend on is distributed via SourceForge.
-
-If you depend on a package that's distributed as a single ``.py`` file, you
-must include an ``"#egg=project-version"`` suffix to the URL, to give a project
-name and version number.  (Be sure to escape any dashes in the name or version
-by replacing them with underscores.)  EasyInstall will recognize this suffix
-and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
-as an egg.
-
-In the case of a VCS checkout, you should also append ``#egg=project-version``
-in order to identify for what package that checkout should be used. You can
-append ``@REV`` to the URL's path (before the fragment) to specify a revision.
-Additionally, you can also force the VCS being used by prepending the URL with
-a certain prefix. Currently available are:
-
--  ``svn+URL`` for Subversion,
--  ``git+URL`` for Git, and
--  ``hg+URL`` for Mercurial
-
-A more complete example would be:
-
-    ``vcs+proto://host/path@revision#egg=project-version``
-
-Be careful with the version. It should match the one inside the project files.
-If you want to disregard the version, you have to omit it both in the
-requirement specifier and in the URL's fragment.
-
-This will do a checkout (or a clone, in Git and Mercurial parlance) to a
-temporary folder and run ``setup.py bdist_egg``.
-
-The ``dependency_links`` option takes the form of a list of URL strings.  For
-example, the below will cause EasyInstall to search the specified page for
-eggs or source distributions, if the package's dependencies aren't already
-installed::
-
-    setup(
-        ...
-        dependency_links = [
-            "http://peak.telecommunity.com/snapshots/"
-        ],
-    )
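-
-A dependency available only from a VCS repository can be listed the same way;
-for example (the host, project name, and tag below are all made up)::
-
-    setup(
-        # other arguments here...
-        install_requires = ["ProjectC==1.0"],
-        dependency_links = [
-            "git+http://example.com/projectc.git@v1.0#egg=ProjectC-1.0"
-        ],
-    )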
-
-
-.. _Declaring Extras:
-
-
-Declaring "Extras" (optional features with their own dependencies)
-------------------------------------------------------------------
-
-Sometimes a project has "recommended" dependencies, that are not required for
-all uses of the project.  For example, a project might offer optional PDF
-output if ReportLab is installed, and reStructuredText support if docutils is
-installed.  These optional features are called "extras", and setuptools allows
-you to define their requirements as well.  In this way, other projects that
-require these optional features can force the additional requirements to be
-installed, by naming the desired extras in their ``install_requires``.
-
-For example, let's say that Project A offers optional PDF and reST support::
-
-    setup(
-        name="Project-A",
-        ...
-        extras_require = {
-            'PDF':  ["ReportLab>=1.2", "RXP"],
-            'reST': ["docutils>=0.3"],
-        }
-    )
-
-As you can see, the ``extras_require`` argument takes a dictionary mapping
-names of "extra" features, to strings or lists of strings describing those
-features' requirements.  These requirements will *not* be automatically
-installed unless another package depends on them (directly or indirectly) by
-including the desired "extras" in square brackets after the associated project
-name.  (Or if the extras were listed in a requirement spec on the EasyInstall
-command line.)
-
-Extras can be used by a project's `entry points`_ to specify dynamic
-dependencies.  For example, if Project A includes a "rst2pdf" script, it might
-declare it like this, so that the "PDF" requirements are only resolved if the
-"rst2pdf" script is run::
-
-    setup(
-        name="Project-A",
-        ...
-        entry_points = {
-            'console_scripts': [
-                'rst2pdf = project_a.tools.pdfgen [PDF]',
-                'rst2html = project_a.tools.htmlgen',
-                # more script entry points ...
-            ],
-        }
-    )
-
-Projects can also use another project's extras when specifying dependencies.
-For example, if project B needs "project A" with PDF support installed, it
-might declare the dependency like this::
-
-    setup(
-        name="Project-B",
-        install_requires = ["Project-A[PDF]"],
-        ...
-    )
-
-This will cause ReportLab to be installed along with project A, if project B is
-installed -- even if project A was already installed.  In this way, a project
-can encapsulate groups of optional "downstream dependencies" under a feature
-name, so that packages that depend on it don't have to know what the downstream
-dependencies are.  If a later version of Project A builds in PDF support and
-no longer needs ReportLab, or if it ends up needing other dependencies besides
-ReportLab in order to provide PDF support, Project B's setup information does
-not need to change, but the right packages will still be installed if needed.
-
-Note, by the way, that if a project ends up not needing any other packages to
-support a feature, it should keep an empty requirements list for that feature
-in its ``extras_require`` argument, so that packages depending on that feature
-don't break (due to an invalid feature name).  For example, if Project A above
-builds in PDF support and no longer needs ReportLab, it could change its
-setup to this::
-
-    setup(
-        name="Project-A",
-        ...
-        extras_require = {
-            'PDF':  [],
-            'reST': ["docutils>=0.3"],
-        }
-    )
-
-so that Package B doesn't have to remove the ``[PDF]`` from its requirement
-specifier.
-
-
-Including Data Files
-====================
-
-The distutils have traditionally allowed installation of "data files", which
-are placed in a platform-specific location.  However, the most common use case
-for data files distributed with a package is for use *by* the package, usually
-by including the data files in the package directory.
-
-Setuptools offers three ways to specify data files to be included in your
-packages.  First, you can simply use the ``include_package_data`` keyword,
-e.g.::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        include_package_data = True
-    )
-
-This tells setuptools to install any data files it finds in your packages.
-The data files must be under CVS or Subversion control, or else they must be
-specified via the distutils' ``MANIFEST.in`` file.  (They can also be tracked
-by another revision control system, using an appropriate plugin.  See the
-section below on `Adding Support for Other Revision Control Systems`_ for
-information on how to write such plugins.)
-
-If the data files are not under version control, or are not in a supported
-version control system, or if you want finer-grained control over what files
-are included (for example, if you have documentation files in your package
-directories and want to exclude them from installation), then you can also use
-the ``package_data`` keyword, e.g.::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        package_data = {
-            # If any package contains *.txt or *.rst files, include them:
-            '': ['*.txt', '*.rst'],
-            # And include any *.msg files found in the 'hello' package, too:
-            'hello': ['*.msg'],
-        }
-    )
-
-The ``package_data`` argument is a dictionary that maps from package names to
-lists of glob patterns.  The globs may include subdirectory names, if the data
-files are contained in a subdirectory of the package.  For example, if the
-package tree looks like this::
-
-    setup.py
-    src/
-        mypkg/
-            __init__.py
-            mypkg.txt
-            data/
-                somefile.dat
-                otherdata.dat
-
-The setuptools setup file might look like this::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages = find_packages('src'),  # include all packages under src
-        package_dir = {'':'src'},   # tell distutils packages are under src
-
-        package_data = {
-            # If any package contains *.txt files, include them:
-            '': ['*.txt'],
-            # And include any *.dat files found in the 'data' subdirectory
-            # of the 'mypkg' package, also:
-            'mypkg': ['data/*.dat'],
-        }
-    )
-
-Notice that if you list patterns in ``package_data`` under the empty string,
-these patterns are used to find files in every package, even ones that also
-have their own patterns listed.  Thus, in the above example, the ``mypkg.txt``
-file gets included even though it's not listed in the patterns for ``mypkg``.
-
-Also notice that if you use paths, you *must* use a forward slash (``/``) as
-the path separator, even if you are on Windows.  Setuptools automatically
-converts slashes to appropriate platform-specific separators at build time.
-
-(Note: although the ``package_data`` argument was previously only available in
-``setuptools``, it was also added to the Python ``distutils`` package as of
-Python 2.4; there is `some documentation for the feature`__ available on the
-python.org website.  If using the setuptools-specific ``include_package_data``
-argument, files specified by ``package_data`` will *not* be automatically
-added to the manifest unless they are tracked by a supported version control
-system, or are listed in the MANIFEST.in file.)
-
-__ http://docs.python.org/dist/node11.html
-
-Sometimes, the ``include_package_data`` or ``package_data`` options alone
-aren't sufficient to precisely define what files you want included.  For
-example, you may want to include package README files in your revision control
-system and source distributions, but exclude them from being installed.  So,
-setuptools offers an ``exclude_package_data`` option as well, that allows you
-to do things like this::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages = find_packages('src'),  # include all packages under src
-        package_dir = {'':'src'},   # tell distutils packages are under src
-
-        include_package_data = True,    # include everything in source control
-
-        # ...but exclude README.txt from all packages
-        exclude_package_data = { '': ['README.txt'] },
-    )
-
-The ``exclude_package_data`` option is a dictionary mapping package names to
-lists of wildcard patterns, just like the ``package_data`` option.  And, just
-as with that option, a key of ``''`` will apply the given pattern(s) to all
-packages.  However, any files that match these patterns will be *excluded*
-from installation, even if they were listed in ``package_data`` or were
-included as a result of using ``include_package_data``.
-
-In summary, the three options allow you to:
-
-``include_package_data``
-    Accept all data files and directories matched by ``MANIFEST.in`` or found
-    in source control.
-
-``package_data``
-    Specify additional patterns to match files and directories that may or may
-    not be matched by ``MANIFEST.in`` or found in source control.
-
-``exclude_package_data``
-    Specify patterns for data files and directories that should *not* be
-    included when a package is installed, even if they would otherwise have
-    been included due to the use of the preceding options.
-
-NOTE: Due to the way the distutils build process works, a data file that you
-include in your project and then stop including may be "orphaned" in your
-project's build directories, requiring you to run ``setup.py clean --all`` to
-fully remove it.  This may also be important for your users and contributors
-if they track intermediate revisions of your project using Subversion; be sure
-to let them know when you make changes that remove files from inclusion so they
-can run ``setup.py clean --all``.
-
-
-Accessing Data Files at Runtime
--------------------------------
-
-Typically, existing programs manipulate a package's ``__file__`` attribute in
-order to find the location of data files.  However, this manipulation isn't
-compatible with PEP 302-based import hooks, including importing from zip files
-and Python Eggs.  It is strongly recommended that, if you are using data files,
-you should use the `Resource Management API`_ of ``pkg_resources`` to access
-them.  The ``pkg_resources`` module is distributed as part of setuptools, so if
-you're using setuptools to distribute your package, there is no reason not to
-use its resource management API.  See also `Accessing Package Resources`_ for
-a quick example of converting code that uses ``__file__`` to use
-``pkg_resources`` instead.
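-
-As a quick sketch (reusing the ``mypkg`` layout from the earlier example;
-adjust the names to your own package), reading a bundled data file might look
-like::
-
-    from pkg_resources import resource_string, resource_filename
-
-    # read the contents of mypkg/data/somefile.dat
-    data = resource_string('mypkg', 'data/somefile.dat')
-
-    # or obtain a real filesystem path (extracted from a zip if necessary)
-    path = resource_filename('mypkg', 'data/somefile.dat')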
-
-.. _Resource Management API: http://peak.telecommunity.com/DevCenter/PythonEggs#resource-management
-.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources
-
-
-Non-Package Data Files
-----------------------
-
-The ``distutils`` normally install general "data files" to a platform-specific
-location (e.g. ``/usr/share``).  This feature is intended to be used for things
-like documentation, example configuration files, and the like.  ``setuptools``
-does not install these data files in a separate location, however.  They are
-bundled inside the egg file or directory, alongside the Python modules and
-packages.  The data files can also be accessed using the `Resource Management
-API`_, by specifying a ``Requirement`` instead of a package name::
-
-    from pkg_resources import Requirement, resource_filename
-    filename = resource_filename(Requirement.parse("MyProject"),"sample.conf")
-
-The above code will obtain the filename of the "sample.conf" file in the data
-root of the "MyProject" distribution.
-
-Note, by the way, that this encapsulation of data files means that you can't
-actually install data files to some arbitrary location on a user's machine;
-this is a feature, not a bug.  You can always include a script in your
-distribution that extracts and copies the documentation or data files to
-a user-specified location, at their discretion.  If you put related data files
-in a single directory, you can use ``resource_filename()`` with the directory
-name to get a filesystem directory that then can be copied with the ``shutil``
-module.  (Even if your package is installed as a zipfile, calling
-``resource_filename()`` on a directory will return an actual filesystem
-directory, whose contents will be that entire subtree of your distribution.)
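-
-A minimal sketch of such a copy step (the ``docs`` directory and the
-destination path are hypothetical)::
-
-    import shutil
-    from pkg_resources import Requirement, resource_filename
-
-    # locate the "docs" directory shipped inside the MyProject distribution
-    source = resource_filename(Requirement.parse("MyProject"), "docs")
-
-    # copy the whole subtree to a user-chosen location
-    shutil.copytree(source, "/home/user/myproject-docs")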
-
-(Of course, if you're writing a new package, you can just as easily place your
-data files or directories inside one of your packages, rather than using the
-distutils' approach.  However, if you're updating an existing application, it
-may be simpler not to change the way it currently specifies these data files.)
-
-
-Automatic Resource Extraction
------------------------------
-
-If you are using tools that expect your resources to be "real" files, or your
-project includes non-extension native libraries or other files that your C
-extensions expect to be able to access, you may need to list those files in
-the ``eager_resources`` argument to ``setup()``, so that the files will be
-extracted together, whenever a C extension in the project is imported.
-
-This is especially important if your project includes shared libraries *other*
-than distutils-built C extensions, and those shared libraries use file
-extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
-extensions that setuptools 0.6a8 and higher automatically detects as shared
-libraries and adds to the ``native_libs.txt`` file for you.  Any shared
-libraries whose names do not end with one of those extensions should be listed
-as ``eager_resources``, because they need to be present in the filesystem when
-the C extensions that link to them are used.
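-
-For example (the package and library names below are made up), a project that
-ships a versioned shared library which its C extension loads at runtime might
-declare::
-
-    setup(
-        # other arguments here...
-        eager_resources = ['mypkg/lib/libfoo.so.1'],
-    )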
-
-The ``pkg_resources`` runtime for compressed packages will automatically
-extract *all* C extensions and ``eager_resources`` at the same time, whenever
-*any* C extension or eager resource is requested via the ``resource_filename()``
-API.  (C extensions are imported using ``resource_filename()`` internally.)
-This ensures that C extensions will see all of the "real" files that they
-expect to see.
-
-Note also that you can list directory resource names in ``eager_resources`` as
-well, in which case the directory's contents (including subdirectories) will be
-extracted whenever any C extension or eager resource is requested.
-
-Please note that if you're not sure whether you need to use this argument, you
-don't!  It's really intended to support projects with lots of non-Python
-dependencies and as a last resort for crufty projects that can't otherwise
-handle being compressed.  If your package is pure Python, Python plus data
-files, or Python plus C, you really don't need this.  You've got to be using
-either C or an external program that needs "real" files in your project before
-there's any possibility of ``eager_resources`` being relevant.
-
-
-Extensible Applications and Frameworks
-======================================
-
-
-.. _Entry Points:
-
-Dynamic Discovery of Services and Plugins
------------------------------------------
-
-``setuptools`` supports creating libraries that "plug in" to extensible
-applications and frameworks, by letting you register "entry points" in your
-project that can be imported by the application or framework.
-
-For example, suppose that a blogging tool wants to support plugins
-that provide translation for various file types to the blog's output format.
-The framework might define an "entry point group" called ``blogtool.parsers``,
-and then allow plugins to register entry points for the file extensions they
-support.
-
-This would allow people to create distributions that contain one or more
-parsers for different file types, and then the blogging tool would be able to
-find the parsers at runtime by looking up an entry point for the file
-extension (or mime type, or however it wants to).
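-
-On the framework side, a minimal sketch of that lookup (continuing the
-hypothetical ``blogtool.parsers`` group) might be::
-
-    from pkg_resources import iter_entry_points
-
-    def find_parser(extension):
-        # return the first plugin registered for this file extension
-        for entry_point in iter_entry_points('blogtool.parsers', extension):
-            return entry_point.load()
-        raise LookupError("no parser registered for %r" % extension)
-
-    parser = find_parser('.rst')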
-
-Note that if the blogging tool includes parsers for certain file formats, it
-can register these as entry points in its own setup script, which means it
-doesn't have to special-case its built-in formats.  They can just be treated
-the same as any other plugin's entry points would be.
-
-If you're creating a project that plugs in to an existing application or
-framework, you'll need to know what entry points or entry point groups are
-defined by that application or framework.  Then, you can register entry points
-in your setup script.  Here are a few examples of ways you might register an
-``.rst`` file parser entry point in the ``blogtool.parsers`` entry point group,
-for our hypothetical blogging tool::
-
-    setup(
-        # ...
-        entry_points = {'blogtool.parsers': '.rst = some_module:SomeClass'}
-    )
-
-    setup(
-        # ...
-        entry_points = {'blogtool.parsers': ['.rst = some_module:a_func']}
-    )
-
-    setup(
-        # ...
-        entry_points = """
-            [blogtool.parsers]
-            .rst = some.nested.module:SomeClass.some_classmethod [reST]
-        """,
-        extras_require = dict(reST = "Docutils>=0.3.5")
-    )
-
-The ``entry_points`` argument to ``setup()`` accepts either a string with
-``.ini``-style sections, or a dictionary mapping entry point group names to
-either strings or lists of strings containing entry point specifiers.  An
-entry point specifier consists of a name and value, separated by an ``=``
-sign.  The value consists of a dotted module name, optionally followed by a
-``:`` and a dotted identifier naming an object within the module.  It can
-also include a bracketed list of "extras" that are required for the entry
-point to be used.  When the invoking application or framework requests loading
-of an entry point, any requirements implied by the associated extras will be
-passed to ``pkg_resources.require()``, so that an appropriate error message
-can be displayed if the needed package(s) are missing.  (Of course, the
-invoking app or framework can ignore such errors if it wants to make an entry
-point optional if a requirement isn't installed.)
-
-
-Defining Additional Metadata
-----------------------------
-
-Some extensible applications and frameworks may need to define their own kinds
-of metadata to include in eggs, which they can then access using the
-``pkg_resources`` metadata APIs.  Ordinarily, this is done by having plugin
-developers include additional files in their ``ProjectName.egg-info``
-directory.  However, since it can be tedious to create such files by hand, you
-may want to create a distutils extension that will create the necessary files
-from arguments to ``setup()``, in much the same way that ``setuptools`` does
-for many of the ``setup()`` arguments it adds.  See the section below on
-`Creating distutils Extensions`_ for more details, especially the subsection on
-`Adding new EGG-INFO Files`_.
-
-
-"Development Mode"
-==================
-
-Under normal circumstances, the ``distutils`` assume that you are going to
-build a distribution of your project, not use it in its "raw" or "unbuilt"
-form.  If you were to use the ``distutils`` that way, you would have to rebuild
-and reinstall your project every time you made a change to it during
-development.
-
-Another problem that sometimes comes up with the ``distutils`` is that you may
-need to do development on two related projects at the same time.  You may need
-to put both projects' packages in the same directory to run them, but need to
-keep them separate for revision control purposes.  How can you do this?
-
-Setuptools allows you to deploy your projects for use in a common directory or
-staging area, but without copying any files.  Thus, you can edit each project's
-code in its checkout directory, and only need to run build commands when you
-change a project's C extensions or similarly compiled files.  You can even
-deploy a project into another project's checkout directory, if that's your
-preferred way of working (as opposed to using a common independent staging area
-or the site-packages directory).
-
-To do this, use the ``setup.py develop`` command.  It works very similarly to
-``setup.py install`` or the EasyInstall tool, except that it doesn't actually
-install anything.  Instead, it creates a special ``.egg-link`` file in the
-deployment directory, that links to your project's source code.  And, if your
-deployment directory is Python's ``site-packages`` directory, it will also
-update the ``easy-install.pth`` file to include your project's source code,
-thereby making it available on ``sys.path`` for all programs using that Python
-installation.
-
-If you have enabled the ``use_2to3`` flag, then of course the ``.egg-link``
-will not link directly to your source code when run under Python 3, since
-that source code would be made for Python 2 and not work under Python 3.
-Instead the ``setup.py develop`` will build Python 3 code under the ``build``
-directory, and link there. This means that after doing code changes you will
-have to run ``setup.py build`` before these changes are picked up by your
-Python 3 installation.
-
-In addition, the ``develop`` command creates wrapper scripts in the target
-script directory that will run your in-development scripts after ensuring that
-all your ``install_requires`` packages are available on ``sys.path``.
-
-You can deploy the same project to multiple staging areas, e.g. if you have
-multiple projects on the same machine that share the project you're doing
-development work on.
-
-When you're done with a given development task, you can remove the project
-source from a staging area using ``setup.py develop --uninstall``, specifying
-the desired staging area if it's not the default.
-
-There are several options to control the precise behavior of the ``develop``
-command; see the section on the `develop`_ command below for more details.
-
-Note that you can also apply setuptools commands to non-setuptools projects,
-using commands like this::
-
-   python -c "import setuptools; execfile('setup.py')" develop
-
-That is, you can simply list the normal setup commands and options following
-the quoted part.
-
-
-Distributing a ``setuptools``-based project
-===========================================
-
-Using ``setuptools``...  Without bundling it!
----------------------------------------------
-
-Your users might not have ``setuptools`` installed on their machines, or even
-if they do, it might not be the right version.  Fixing this is easy; just
-download `distribute_setup.py`_, and put it in the same directory as your ``setup.py``
-script.  (Be sure to add it to your revision control system, too.)  Then add
-these two lines to the very top of your setup script, before the script imports
-anything from setuptools:
-
-.. code-block:: python
-
-    import distribute_setup
-    distribute_setup.use_setuptools()
-
-That's it.  The ``distribute_setup`` module will automatically download a matching
-version of ``setuptools`` from PyPI, if it isn't present on the target system.
-Whenever you install an updated version of setuptools, you should also update
-your projects' ``distribute_setup.py`` files, so that a matching version gets installed
-on the target machine(s).
-
-By the way, setuptools supports the new PyPI "upload" command, so you can use
-``setup.py sdist upload`` or ``setup.py bdist_egg upload`` to upload your
-source or egg distributions respectively.  Your project's current version must
-be registered with PyPI first, of course; you can use ``setup.py register`` to
-do that.  Or you can do it all in one step, e.g. ``setup.py register sdist
-bdist_egg upload`` will register the package, build source and egg
-distributions, and then upload them both to PyPI, where they'll be easily
-found by other projects that depend on them.
-
-(By the way, if you need to distribute a specific version of ``setuptools``,
-you can specify the exact version and base download URL as parameters to the
-``use_setuptools()`` function.  See the function's docstring for details.)
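-
-A sketch, assuming the ``version`` and ``download_base`` parameter names from
-the bootstrap module's docstring (the URL below is illustrative only)::
-
-    import distribute_setup
-    distribute_setup.use_setuptools(
-        version="0.6.34",
-        download_base="http://example.com/packages/",
-    )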
-
-
-What Your Users Should Know
----------------------------
-
-In general, a setuptools-based project looks just like any distutils-based
-project -- as long as your users have an internet connection and are installing
-to ``site-packages``, that is.  But for some users, these conditions don't
-apply, and they may become frustrated if this is their first encounter with
-a setuptools-based project.  To keep these users happy, you should review the
-following topics in your project's installation instructions, if they are
-relevant to your project and your target audience isn't already familiar with
-setuptools and ``easy_install``.
-
-Network Access
-    If your project is using ``distribute_setup``, you should inform users of the
-    need to either have network access, or to preinstall the correct version of
-    setuptools using the `EasyInstall installation instructions`_.  Those
-    instructions also have tips for dealing with firewalls as well as how to
-    manually download and install setuptools.
-
-Custom Installation Locations
-    You should inform your users that if they are installing your project to
-    somewhere other than the main ``site-packages`` directory, they should
-    first install setuptools using the instructions for `Custom Installation
-    Locations`_, before installing your project.
-
-Your Project's Dependencies
-    If your project depends on other projects that may need to be downloaded
-    from PyPI or elsewhere, you should list them in your installation
-    instructions, or tell users how to find out what they are.  While most
-    users will not need this information, any users who don't have unrestricted
-    internet access may have to find, download, and install the other projects
-    manually.  (Note, however, that they must still install those projects
-    using ``easy_install``, or your project will not know they are installed,
-    and your setup script will try to download them again.)
-
-    If you want to be especially friendly to users with limited network access,
-    you may wish to build eggs for your project and its dependencies, making
-    them all available for download from your site, or at least create a page
-    with links to all of the needed eggs.  In this way, users with limited
-    network access can manually download all the eggs to a single directory,
-    then use the ``-f`` option of ``easy_install`` to specify the directory
-    to find eggs in.  Users who have full network access can just use ``-f``
-    with the URL of your download page, and ``easy_install`` will find all the
-    needed eggs using your links directly.  This is also useful when your
-    target audience isn't able to compile packages (e.g. most Windows users)
-    and your package or some of its dependencies include C code.
-
-Subversion or CVS Users and Co-Developers
-    Users and co-developers who are tracking your in-development code using
-    CVS, Subversion, or some other revision control system should probably read
-    this manual's sections regarding such development.  Alternately, you may
-    wish to create a quick-reference guide containing the tips from this manual
-    that apply to your particular situation.  For example, if you recommend
-    that people use ``setup.py develop`` when tracking your in-development
-    code, you should let them know that this needs to be run after every update
-    or commit.
-
-    Similarly, if you remove modules or data files from your project, you
-    should remind them to run ``setup.py clean --all`` and delete any obsolete
-    ``.pyc`` or ``.pyo`` files.  (This tip applies to the distutils in general, not
-    just setuptools, but not everybody knows about them; be kind to your users
-    by spelling out your project's best practices rather than leaving them
-    guessing.)
-
-Creating System Packages
-    Some users want to manage all Python packages using a single package
-    manager, and sometimes that package manager isn't ``easy_install``!
-    Setuptools currently supports ``bdist_rpm``, ``bdist_wininst``, and
-    ``bdist_dumb`` formats for system packaging.  If a user has a locally
-    installed "bdist" packaging tool that internally uses the distutils
-    ``install`` command, it should be able to work with ``setuptools``.  Some
-    examples of "bdist" formats that this should work with include the
-    ``bdist_nsi`` and ``bdist_msi`` formats for Windows.
-
-    However, packaging tools that build binary distributions by running
-    ``setup.py install`` on the command line or as a subprocess will require
-    modification to work with setuptools.  They should use the
-    ``--single-version-externally-managed`` option to the ``install`` command,
-    combined with the standard ``--root`` or ``--record`` options.
-    See the `install command`_ documentation below for more details.  The
-    ``bdist_deb`` command is an example of a command that currently requires
-    this kind of patching to work with setuptools.
-
-    If you or your users have a problem building a usable system package for
-    your project, please report the problem via the mailing list so that
-    either the "bdist" tool in question or setuptools can be modified to
-    resolve the issue.
-
-
-
-Managing Multiple Projects
---------------------------
-
-If you're managing several projects that need to use ``distribute_setup``, and you
-are using Subversion as your revision control system, you can use the
-"svn:externals" property to share a single copy of ``distribute_setup`` between
-projects, so that it will always be up-to-date whenever you check out or update
-an individual project, without having to manually update each project to use
-a new version.
-
-However, because Subversion only supports using directories as externals, you
-have to turn ``distribute_setup.py`` into ``distribute_setup/__init__.py`` in order
-to do this, then create "externals" definitions that map the ``distribute_setup``
-directory into each project.  Also, if any of your projects use
-``find_packages()`` on their setup directory, you will need to exclude the
-resulting ``distribute_setup`` package, to keep it from being included in your
-distributions, e.g.::
-
-    setup(
-        ...
-        packages = find_packages(exclude=['distribute_setup']),
-    )
-
-Of course, the ``distribute_setup`` package will still be included in your
-packages' source distributions, as it needs to be.
-
-For your convenience, you may use the following external definition, which will
-track the latest version of setuptools::
-
-    ez_setup svn://svn.eby-sarna.com/svnroot/ez_setup
-
-You can set this by executing this command in your project directory::
-
-    svn propedit svn:externals .
-
-And then adding the line shown above to the file that comes up for editing.
-
-
-Setting the ``zip_safe`` flag
------------------------------
-
-For maximum performance, Python packages are best installed as zip files.
-Not all packages, however, are capable of running in compressed form, because
-they may expect to be able to access either source code or data files as
-normal operating system files.  So, ``setuptools`` can install your project
-as a zipfile or a directory, and its default choice is determined by the
-project's ``zip_safe`` flag.
-
-You can pass a True or False value for the ``zip_safe`` argument to the
-``setup()`` function, or you can omit it.  If you omit it, the ``bdist_egg``
-command will analyze your project's contents to see if it can detect any
-conditions that would prevent it from working in a zipfile.  It will output
-notices to the console about any such conditions that it finds.
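-
-For example, a project that has been verified to work when zipped might simply
-declare the flag explicitly (the value shown here is only an illustration; use
-whatever you have verified for your own project)::
-
-    setup(
-        # ...
-        zip_safe = True,
-    )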
-
-Currently, this analysis is extremely conservative: it will consider the
-project unsafe if it contains any C extensions or datafiles whatsoever.  This
-does *not* mean that the project can't or won't work as a zipfile!  It just
-means that the ``bdist_egg`` authors aren't yet comfortable asserting that
-the project *will* work.  If the project contains no C or data files, and does
-no ``__file__`` or ``__path__`` introspection or source code manipulation, then
-there is an extremely solid chance the project will work when installed as a
-zipfile.  (And if the project uses ``pkg_resources`` for all its data file
-access, then C extensions and other data files shouldn't be a problem at all.
-See the `Accessing Data Files at Runtime`_ section above for more information.)
-
-However, if ``bdist_egg`` can't be *sure* that your package will work, but
-you've checked over all the warnings it issued, and you are satisfied that it
-*will* work (or you simply want to try it for yourself), then you should set
-``zip_safe`` to ``True`` in your ``setup()`` call.  If it turns out that it
-doesn't work, you can always change it to ``False``, which will force
-``setuptools`` to install your project as a directory rather than as a zipfile.
-
-Of course, the end-user can still override either decision, if they are using
-EasyInstall to install your package.  And, if you want to override for testing
-purposes, you can just run ``setup.py easy_install --zip-ok .`` or ``setup.py
-easy_install --always-unzip .`` in your project directory to install the
-package as a zipfile or directory, respectively.
-
-In the future, as we gain more experience with different packages and become
-more satisfied with the robustness of the ``pkg_resources`` runtime, the
-"zip safety" analysis may become less conservative.  However, we strongly
-recommend that you determine for yourself whether your project functions
-correctly when installed as a zipfile, correct any problems if you can, and
-then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
-flag, so that it will not be necessary for ``bdist_egg`` or ``EasyInstall`` to
-try to guess whether your project can work as a zipfile.
-
-
-Namespace Packages
-------------------
-
-Sometimes, a large package is more useful if distributed as a collection of
-smaller eggs.  However, Python does not normally allow the contents of a
-package to be retrieved from more than one location.  "Namespace packages"
-are a solution for this problem.  When you declare a package to be a namespace
-package, it means that the package has no meaningful contents in its
-``__init__.py``, and that it is merely a container for modules and subpackages.
-
-The ``pkg_resources`` runtime will then automatically ensure that the contents
-of namespace packages that are spread over multiple eggs or directories are
-combined into a single "virtual" package.
-
-The ``namespace_packages`` argument to ``setup()`` lets you declare your
-project's namespace packages, so that they will be included in your project's
-metadata.  The argument should list the namespace packages that the egg
-participates in.  For example, the ZopeInterface project might do this::
-
-    setup(
-        # ...
-        namespace_packages = ['zope']
-    )
-
-because it contains a ``zope.interface`` package that lives in the ``zope``
-namespace package.  Similarly, a project for a standalone ``zope.publisher``
-would also declare the ``zope`` namespace package.  When these projects are
-installed and used, Python will see them both as part of a "virtual" ``zope``
-package, even though they will be installed in different locations.
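-
-A minimal sketch of what the standalone ``zope.publisher`` project's setup
-script might contain (the package list is abbreviated for illustration)::
-
-    setup(
-        name = "zope.publisher",
-        packages = ['zope', 'zope.publisher'],
-        namespace_packages = ['zope'],
-        # ...
-    )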
-
-Namespace packages don't have to be top-level packages.  For example, Zope 3's
-``zope.app`` package is a namespace package, and in the future PEAK's
-``peak.util`` package will be too.
-
-Note, by the way, that your project's source tree must include the namespace
-packages' ``__init__.py`` files (and the ``__init__.py`` of any parent
-packages), in a normal Python package layout.  These ``__init__.py`` files
-*must* contain the line::
-
-    __import__('pkg_resources').declare_namespace(__name__)
-
-This code ensures that the namespace package machinery is operating and that
-the current package is registered as a namespace package.
-
-You must NOT include any other code or data in a namespace package's
-``__init__.py``.  Even though it may appear to work during development, or when
-projects are installed as ``.egg`` files, it will not work when the projects
-are installed using "system" packaging tools -- in such cases the
-``__init__.py`` files will not be installed, let alone executed.
-
-You must include the ``declare_namespace()`` line in the ``__init__.py`` of
-*every* project that has contents for the namespace package in question, in
-order to ensure that the namespace will be declared regardless of which
-project's copy of ``__init__.py`` is loaded first.  (If the first loaded
-``__init__.py`` doesn't declare it, it will never *be* declared, because no
-other copies will ever be loaded!)
-
-
-TRANSITIONAL NOTE
-~~~~~~~~~~~~~~~~~
-
-Setuptools 0.6a automatically calls ``declare_namespace()`` for you at runtime,
-but the 0.7a versions will *not*.  This is because the automatic declaration
-feature has some negative side effects, such as needing to import all namespace
-packages during the initialization of the ``pkg_resources`` runtime, and also
-the need for ``pkg_resources`` to be explicitly imported before any namespace
-packages work at all.  Beginning with the 0.7a releases, you'll be responsible
-for including your own declaration lines, and the automatic declaration feature
-will be dropped to get rid of the negative side effects.
-
-During the remainder of the 0.6 development cycle, therefore, setuptools will
-warn you about missing ``declare_namespace()`` calls in your ``__init__.py``
-files, and you should correct these as soon as possible before setuptools 0.7a1
-is released.  Namespace packages without declaration lines will not work
-correctly once a user has upgraded to setuptools 0.7a1, so it's important that
-you make this change now in order to avoid having your code break in the field.
-Our apologies for the inconvenience, and thank you for your patience.
-
-
-
-Tagging and "Daily Build" or "Snapshot" Releases
-------------------------------------------------
-
-When a set of related projects are under development, it may be important to
-track finer-grained version increments than you would normally use for e.g.
-"stable" releases.  While stable releases might be measured in dotted numbers
-with alpha/beta/etc. status codes, development versions of a project often
-need to be tracked by revision or build number or even build date.  This is
-especially true when projects in development need to refer to one another, and
-therefore may literally need an up-to-the-minute version of something!
-
-To support these scenarios, ``setuptools`` allows you to "tag" your source and
-egg distributions by adding one or more of the following to the project's
-"official" version identifier:
-
-* A manually-specified pre-release tag, such as "build" or "dev", or a
-  manually-specified post-release tag, such as a build or revision number
-  (``--tag-build=STRING, -bSTRING``)
-
-* A "last-modified revision number" string generated automatically from
-  Subversion's metadata (assuming your project is being built from a Subversion
-  "working copy")  (``--tag-svn-revision, -r``)
-
-* An 8-character representation of the build date (``--tag-date, -d``), as
-  a postrelease tag
-
-You can add these tags by adding ``egg_info`` and the desired options to
-the command line ahead of the ``sdist`` or ``bdist`` commands that you want
-to generate a daily build or snapshot for.  See the section below on the
-`egg_info`_ command for more details.
-
-(Also, before you release your project, be sure to see the section above on
-`Specifying Your Project's Version`_ for more information about how pre- and
-post-release tags affect how setuptools and EasyInstall interpret version
-numbers.  This is important in order to make sure that dependency processing
-tools will know which versions of your project are newer than others.)
-
-Finally, if you are creating builds frequently, and either building them in a
-downloadable location or are copying them to a distribution server, you should
-probably also check out the `rotate`_ command, which lets you automatically
-delete all but the N most-recently-modified distributions matching a glob
-pattern.  So, you can use a command line like::
-
-    setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
-
-to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the
-most recent Subversion revision that affected the source tree), and then
-delete any egg files from the distribution directory except for the three
-that were built most recently.
-
-If you have to manage automated builds for multiple packages, each with
-different tagging and rotation policies, you may also want to check out the
-`alias`_ command, which would let each package define an alias like ``daily``
-that would perform the necessary tag, build, and rotate commands.  Then, a
-simpler script or cron job could just run ``setup.py daily`` in each project
-directory.  (And, you could also define sitewide or per-user default versions
-of the ``daily`` alias, so that projects that didn't define their own would
-use the appropriate defaults.)
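-
-For example, a project-local ``daily`` alias along these lines (the exact
-tagging options are just one possibility) could be defined once and then reused
-by a simple build script or cron job::
-
-    setup.py alias daily egg_info --tag-build=DEV --tag-svn-revision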
-
-
-Generating Source Distributions
--------------------------------
-
-``setuptools`` enhances the distutils' default algorithm for source file
-selection, so that all files managed by CVS or Subversion in your project tree
-are included in any source distribution you build.  This is a big improvement
-over having to manually write a ``MANIFEST.in`` file and try to keep it in
-sync with your project.  So, if you are using CVS or Subversion, and your
-source distributions only need to include files that you're tracking in
-revision control, don't create a ``MANIFEST.in`` file for your project.
-(And, if you already have one, you might consider deleting it the next time
-you would otherwise have to change it.)
-
-(NOTE: other revision control systems besides CVS and Subversion can be
-supported using plugins; see the section below on `Adding Support for Other
-Revision Control Systems`_ for information on how to write such plugins.)
-
-If you need to include automatically generated files, or files that are kept in
-an unsupported revision control system, you'll need to create a ``MANIFEST.in``
-file to specify any files that the default file location algorithm doesn't
-catch.  See the distutils documentation for more information on the format of
-the ``MANIFEST.in`` file.
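-
-A short ``MANIFEST.in`` covering a couple of common cases might look something
-like this (the file names are purely illustrative)::
-
-    include CHANGES.txt
-    recursive-include docs *.txt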
-
-But, be sure to ignore any part of the distutils documentation that deals with
-``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you
-from these issues and doesn't work the same way in any case.  Unlike the
-distutils, setuptools regenerates the source distribution manifest file
-every time you build a source distribution, and it builds it inside the
-project's ``.egg-info`` directory, out of the way of your main project
-directory.  You therefore need not worry about whether it is up-to-date or not.
-
-Indeed, because setuptools' approach to determining the contents of a source
-distribution is so much simpler, its ``sdist`` command omits nearly all of
-the options that the distutils' more complex ``sdist`` process requires.  For
-all practical purposes, you'll probably use only the ``--formats`` option, if
-you use any option at all.
-
-(By the way, if you're using some other revision control system, you might
-consider creating and publishing a `revision control plugin for setuptools`_.)
-
-
-.. _revision control plugin for setuptools: `Adding Support for Other Revision Control Systems`_
-
-
-Making your package available for EasyInstall
----------------------------------------------
-
-If you use the ``register`` command (``setup.py register``) to register your
-package with PyPI, that's most of the battle right there.  (See the
-`docs for the register command`_ for more details.)
-
-.. _docs for the register command: http://docs.python.org/dist/package-index.html
-
-If you also use the `upload`_ command to upload actual distributions of your
-package, that's even better, because EasyInstall will be able to find and
-download them directly from your project's PyPI page.
-
-However, there may be reasons why you don't want to upload distributions to
-PyPI, and just want your existing distributions (or perhaps a Subversion
-checkout) to be used instead.
-
-So here's what you need to do before running the ``register`` command.  There
-are three ``setup()`` arguments that affect EasyInstall:
-
-``url`` and ``download_url``
-   These become links on your project's PyPI page.  EasyInstall will examine
-   them to see if they link to a package ("primary links"), or whether they are
-   HTML pages.  If they're HTML pages, EasyInstall scans all HREFs on the
-   page for primary links.
-
-``long_description``
-   EasyInstall will check any URLs contained in this argument to see if they
-   are primary links.
-
-A URL is considered a "primary link" if it is a link to a .tar.gz, .tgz, .zip,
-.egg, .egg.zip, .tar.bz2, or .exe file, or if it has an ``#egg=project`` or
-``#egg=project-version`` fragment identifier attached to it.  EasyInstall
-attempts to determine a project name and optional version number from the text
-of a primary link *without* downloading it.  When it has found all the primary
-links, EasyInstall will select the best match based on requested version,
-platform compatibility, and other criteria.
-
-So, if your ``url`` or ``download_url`` point either directly to a downloadable
-source distribution, or to HTML page(s) that have direct links to such, then
-EasyInstall will be able to locate downloads automatically.  If you want to
-make Subversion checkouts available, then you should create links with either
-``#egg=project`` or ``#egg=project-version`` added to the URL.  You should
-replace ``project`` and ``version`` with the values they would have in an egg
-filename.  (Be sure to actually generate an egg and then use the initial part
-of the filename, rather than trying to guess what the escaped form of the
-project name and version number will be.)
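-
-For instance, a checkout link embedded in the ``long_description`` or used as
-the ``download_url`` might look like this (the host name is purely
-hypothetical)::
-
-    http://svn.example.org/projectname/trunk#egg=projectname-dev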
-
-Note that Subversion checkout links are of lower precedence than other kinds
-of distributions, so EasyInstall will not select a Subversion checkout for
-downloading unless it has a version included in the ``#egg=`` suffix, and
-it's a higher version than EasyInstall has seen in any other links for your
-project.
-
-As a result, it's common practice to mark checkout URLs with a version of
-"dev" (i.e., ``#egg=projectname-dev``), so that users can do something like
-this::
-
-    easy_install --editable projectname==dev
-
-in order to check out the in-development version of ``projectname``.
-
-
-Managing "Continuous Releases" Using Subversion
------------------------------------------------
-
-If you expect your users to track in-development versions of your project via
-Subversion, there are a few additional steps you should take to ensure that
-things work smoothly with EasyInstall.  First, you should add the following
-to your project's ``setup.cfg`` file:
-
-.. code-block:: ini
-
-    [egg_info]
-    tag_build = .dev
-    tag_svn_revision = 1
-
-This will tell ``setuptools`` to generate package version numbers like
-``1.0a1.dev-r1263``, which will be considered to be an *older* release than
-``1.0a1``.  Thus, when you actually release ``1.0a1``, the entire egg
-infrastructure (including ``setuptools``, ``pkg_resources`` and EasyInstall)
-will know that ``1.0a1`` supersedes any interim snapshots from Subversion, and
-handle upgrades accordingly.
-
-(Note: the project version number you specify in ``setup.py`` should always be
-the *next* version of your software, not the last released version.
-Alternately, you can leave out the ``tag_build=.dev``, and always use the
-*last* release as a version number, so that your post-1.0 builds are labelled
-``1.0-r1263``, indicating a post-1.0 patchlevel.  Most projects so far,
-however, seem to prefer to think of their project as being a future version
-still under development, rather than a past version being patched.  It is of
-course possible for a single project to have both situations, using
-post-release numbering on release branches, and pre-release numbering on the
-trunk.  But you don't have to make things this complex if you don't want to.)
-
-Commonly, projects releasing code from Subversion will include a PyPI link to
-their checkout URL (as described in the previous section) with an
-``#egg=projectname-dev`` suffix.  This allows users to request EasyInstall
-to download ``projectname==dev`` in order to get the latest in-development
-code.  Note that if your project depends on such in-progress code, you may wish
-to specify your ``install_requires`` (or other requirements) to include
-``==dev``, e.g.:
-
-.. code-block:: python
-
-    install_requires = ["OtherProject>=0.2a1.dev-r143,==dev"]
-
-The above example says, "I really want at least this particular development
-revision number, but feel free to follow and use an ``#egg=OtherProject-dev``
-link if you find one".  This avoids the need to have actual source or binary
-distribution snapshots of in-development code available, just to be able to
-depend on the latest and greatest a project has to offer.
-
-A final note for Subversion development: if you are using SVN revision tags
-as described in this section, it's a good idea to run ``setup.py develop``
-after each Subversion checkin or update, because your project's version number
-will be changing, and your script wrappers need to be updated accordingly.
-
-Also, if the project's requirements have changed, the ``develop`` command will
-take care of fetching the updated dependencies, building changed extensions,
-etc.  Be sure to also remind any of your users who check out your project
-from Subversion that they need to run ``setup.py develop`` after every update
-in order to keep their checkout completely in sync.
-
-
-Making "Official" (Non-Snapshot) Releases
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When you make an official release, creating source or binary distributions,
-you will need to override the tag settings from ``setup.cfg``, so that you
-don't end up registering versions like ``foobar-0.7a1.dev-r34832``.  This is
-easy to do if you are developing on the trunk and using tags or branches for
-your releases - just make the change to ``setup.cfg`` after branching or
-tagging the release, so the trunk will still produce development snapshots.
-
-Alternately, if you are not branching for releases, you can override the
-default version options on the command line, using something like::
-
-    python setup.py egg_info -RDb "" sdist bdist_egg register upload
-
-The first part of this command (``egg_info -RDb ""``) will override the
-configured tag information, before creating source and binary eggs, registering
-the project with PyPI, and uploading the files.  Thus, these commands will use
-the plain version from your ``setup.py``, without adding the Subversion
-revision number or build designation string.
-
-Of course, if you will be doing this a lot, you may wish to create a personal
-alias for this operation, e.g.::
-
-    python setup.py alias -u release egg_info -RDb ""
-
-You can then use it like this::
-
-    python setup.py release sdist bdist_egg register upload
-
-Or of course you can create more elaborate aliases that do all of the above.
-See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
-
-
-
-Distributing Extensions compiled with Pyrex
--------------------------------------------
-
-``setuptools`` includes transparent support for building Pyrex extensions, as
-long as you define your extensions using ``setuptools.Extension``, *not*
-``distutils.Extension``.  You must also not import anything from Pyrex in
-your setup script.
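-
-A minimal sketch of such a setup script (the module and file names are made up
-for the example)::
-
-    from setuptools import setup, Extension
-
-    setup(
-        # ...
-        ext_modules = [Extension("mypackage.fast", ["mypackage/fast.pyx"])],
-    )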
-
-If you follow these rules, you can safely list ``.pyx`` files as the source
-of your ``Extension`` objects in the setup script.  ``setuptools`` will detect
-at build time whether Pyrex is installed or not.  If it is, then ``setuptools``
-will use it.  If not, then ``setuptools`` will silently change the
-``Extension`` objects to refer to the ``.c`` counterparts of the ``.pyx``
-files, so that the normal distutils C compilation process will occur.
-
-Of course, for this to work, your source distributions must include the C
-code generated by Pyrex, as well as your original ``.pyx`` files.  This means
-that you will probably want to include current ``.c`` files in your revision
-control system, rebuilding them whenever you check changes in for the ``.pyx``
-source files.  This will ensure that people tracking your project in CVS or
-Subversion will be able to build it even if they don't have Pyrex installed,
-and that your source releases will be similarly usable with or without Pyrex.
-
-
------------------
-Command Reference
------------------
-
-.. _alias:
-
-``alias`` - Define shortcuts for commonly used commands
-=======================================================
-
-Sometimes, you need to use the same commands over and over, but you can't
-necessarily set them as defaults.  For example, if you produce both development
-snapshot releases and "stable" releases of a project, you may want to put
-the distributions in different places, or use different ``egg_info`` tagging
-options, etc.  In these cases, it doesn't make sense to set the options in
-a distutils configuration file, because the values of the options change based
-on what you're trying to do.
-
-Setuptools therefore allows you to define "aliases" - shortcut names for
-an arbitrary string of commands and options, using ``setup.py alias aliasname
-expansion``, where aliasname is the name of the new alias, and the remainder of
-the command line supplies its expansion.  For example, this command defines
-a sitewide alias called "daily", that sets various ``egg_info`` tagging
-options::
-
-    setup.py alias --global-config daily egg_info --tag-svn-revision \
-        --tag-build=development
-
-Once the alias is defined, it can then be used with other setup commands,
-e.g.::
-
-    setup.py daily bdist_egg        # generate a daily-build .egg file
-    setup.py daily sdist            # generate a daily-build source distro
-    setup.py daily sdist bdist_egg  # generate both
-
-The above commands are interpreted as if the word ``daily`` were replaced with
-``egg_info --tag-svn-revision --tag-build=development``.
-
-Note that setuptools will expand each alias *at most once* in a given command
-line.  This serves two purposes.  First, if you accidentally create an alias
-loop, it will have no effect; you'll instead get an error message about an
-unknown command.  Second, it allows you to define an alias for a command, that
-uses that command.  For example, this (project-local) alias::
-
-    setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg
-
-redefines the ``bdist_egg`` command so that it always runs the ``rotate``
-command afterwards to delete all but the newest egg file.  It doesn't loop
-indefinitely on ``bdist_egg`` because the alias is only expanded once when
-used.
-
-You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.::
-
-    setup.py alias --global-config --remove daily
-
-would delete the "daily" alias we defined above.
-
-Aliases can be defined on a project-specific, per-user, or sitewide basis.  The
-default is to define or remove a project-specific alias, but you can use any of
-the `configuration file options`_ (listed under the `saveopts`_ command, below)
-to determine which distutils configuration file the alias will be added to
-(or removed from).
-
-Note that if you omit the "expansion" argument to the ``alias`` command,
-you'll get output showing that alias' current definition (and what
-configuration file it's defined in).  If you omit the alias name as well,
-you'll get a listing of all current aliases along with their configuration
-file locations.
-
-
-``bdist_egg`` - Create a Python Egg for the project
-===================================================
-
-This command generates a Python Egg (``.egg`` file) for the project.  Python
-Eggs are the preferred binary distribution format for EasyInstall, because they
-are cross-platform (for "pure" packages), directly importable, and contain
-project metadata including scripts and information about the project's
-dependencies.  They can be simply downloaded and added to ``sys.path``
-directly, or they can be placed in a directory on ``sys.path`` and then
-automatically discovered by the egg runtime system.
-
-This command runs the `egg_info`_ command (if it hasn't already run) to update
-the project's metadata (``.egg-info``) directory.  If you have added any extra
-metadata files to the ``.egg-info`` directory, those files will be included in
-the new egg file's metadata directory, for use by the egg runtime system or by
-any applications or frameworks that use that metadata.
-
-You won't usually need to specify any special options for this command; just
-use ``bdist_egg`` and you're done.  But there are a few options that may
-be occasionally useful:
-
-``--dist-dir=DIR, -d DIR``
-    Set the directory where the ``.egg`` file will be placed.  If you don't
-    supply this, then the ``--dist-dir`` setting of the ``bdist`` command
-    will be used, which is usually a directory named ``dist`` in the project
-    directory.
-
-``--plat-name=PLATFORM, -p PLATFORM``
-    Set the platform name string that will be embedded in the egg's filename
-    (assuming the egg contains C extensions).  This can be used to override
-    the distutils default platform name with something more meaningful.  Keep
-    in mind, however, that the egg runtime system expects to see eggs with
-    distutils platform names, so it may ignore or reject eggs with non-standard
-    platform names.  Similarly, the EasyInstall program may ignore them when
-    searching web pages for download links.  However, if you are
-    cross-compiling or doing some other unusual things, you might find a use
-    for this option.
-
-``--exclude-source-files``
-    Don't include any modules' ``.py`` files in the egg, just compiled Python,
-    C, and data files.  (Note that this doesn't affect any ``.py`` files in the
-    EGG-INFO directory or its subdirectories, since for example there may be
-    scripts with a ``.py`` extension which must still be retained.)  We don't
-    recommend that you use this option except for packages that are being
-    bundled for proprietary end-user applications, or for "embedded" scenarios
-    where space is at an absolute premium.  On the other hand, if your package
-    is going to be installed and used in compressed form, you might as well
-    exclude the source because Python's ``traceback`` module doesn't currently
-    understand how to display zipped source code anyway, or how to deal with
-    files that are in a different place from where their code was compiled.
-
-There are also some options you will probably never need, but which are there
-because they were copied from similar ``bdist`` commands used as an example for
-creating this one.  They may be useful for testing and debugging, however,
-which is why we kept them:
-
-``--keep-temp, -k``
-    Keep the contents of the ``--bdist-dir`` tree around after creating the
-    ``.egg`` file.
-
-``--bdist-dir=DIR, -b DIR``
-    Set the temporary directory for creating the distribution.  The entire
-    contents of this directory are zipped to create the ``.egg`` file, after
-    running various installation commands to copy the package's modules, data,
-    and extensions here.
-
-``--skip-build``
-    Skip doing any "build" commands; just go straight to the
-    install-and-compress phases.
-
-
-.. _develop:
-
-``develop`` - Deploy the project source in "Development Mode"
-=============================================================
-
-This command allows you to deploy your project's source for use in one or more
-"staging areas" where it will be available for importing.  This deployment is
-done in such a way that changes to the project source are immediately available
-in the staging area(s), without needing to run a build or install step after
-each change.
-
-The ``develop`` command works by creating an ``.egg-link`` file (named for the
-project) in the given staging area.  If the staging area is Python's
-``site-packages`` directory, it also updates an ``easy-install.pth`` file so
-that the project is on ``sys.path`` by default for all programs run using that
-Python installation.
-
-The ``develop`` command also installs wrapper scripts in the staging area (or
-a separate directory, as specified) that will ensure the project's dependencies
-are available on ``sys.path`` before running the project's source scripts.
-And, it ensures that any missing project dependencies are available in the
-staging area, by downloading and installing them if necessary.
-
-Last, but not least, the ``develop`` command invokes the ``build_ext -i``
-command to ensure any C extensions in the project have been built and are
-up-to-date, and the ``egg_info`` command to ensure the project's metadata is
-updated (so that the runtime and wrappers know what the project's dependencies
-are).  If you make any changes to the project's setup script or C extensions,
-you should rerun the ``develop`` command against all relevant staging areas to
-keep the project's scripts, metadata and extensions up-to-date.  Most other
-kinds of changes to your project should not require any build operations or
-rerunning ``develop``, but keep in mind that even minor changes to the setup
-script (e.g. changing an entry point definition) require you to re-run the
-``develop`` or ``test`` commands to keep the distribution updated.
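-
-In the simplest case, deploying and later un-deploying a project for
-development looks something like this (run from the project directory)::
-
-    setup.py develop              # deploy the project in development mode
-    setup.py develop --uninstall  # remove the deployment again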
-
-Here are some of the options that the ``develop`` command accepts.  Note that
-they affect the project's dependencies as well as the project itself, so if you
-have dependencies that need to be installed and you use ``--exclude-scripts``
-(for example), the dependencies' scripts will not be installed either!  For
-this reason, you may want to use EasyInstall to install the project's
-dependencies before using the ``develop`` command, if you need finer control
-over the installation options for dependencies.
-
-``--uninstall, -u``
-    Un-deploy the current project.  You may use the ``--install-dir`` or ``-d``
-    option to designate the staging area.  The created ``.egg-link`` file will
-    be removed, if present and it is still pointing to the project directory.
-    The project directory will be removed from ``easy-install.pth`` if the
-    staging area is Python's ``site-packages`` directory.
-
-    Note that this option currently does *not* uninstall script wrappers!  You
-    must uninstall them yourself, or overwrite them by using EasyInstall to
-    activate a different version of the package.  You can also avoid installing
-    script wrappers in the first place, if you use the ``--exclude-scripts``
-    (aka ``-x``) option when you run ``develop`` to deploy the project.
-
-``--multi-version, -m``
-    "Multi-version" mode. Specifying this option prevents ``develop`` from
-    adding an ``easy-install.pth`` entry for the project(s) being deployed, and
-    if an entry for any version of a project already exists, the entry will be
-    removed upon successful deployment.  In multi-version mode, no specific
-    version of the package is available for importing, unless you use
-    ``pkg_resources.require()`` to put it on ``sys.path``, or you are running
-    a wrapper script generated by ``setuptools`` or EasyInstall.  (In which
-    case the wrapper script calls ``require()`` for you.)
-
-    Note that if you install to a directory other than ``site-packages``,
-    this option is automatically in effect, because ``.pth`` files can only be
-    used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use
-    the ``--install-dir`` or ``-d`` option (or they are set via configuration
-    file(s)), your project and its dependencies will be deployed in
-    multi-version mode.
-
-``--install-dir=DIR, -d DIR``
-    Set the installation directory (staging area).  If this option is not
-    directly specified on the command line or in a distutils configuration
-    file, the distutils default installation location is used.  Normally, this
-    will be the ``site-packages`` directory, but if you are using distutils
-    configuration files, setting things like ``prefix`` or ``install_lib``,
-    then those settings are taken into account when computing the default
-    staging area.
-
-``--script-dir=DIR, -s DIR``
-    Set the script installation directory.  If you don't supply this option
-    (via the command line or a configuration file), but you *have* supplied
-    an ``--install-dir`` (via command line or config file), then this option
-    defaults to the same directory, so that the scripts will be able to find
-    their associated package installation.  Otherwise, this setting defaults
-    to the location where the distutils would normally install scripts, taking
-    any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
-    Don't deploy script wrappers.  This is useful if you don't want to disturb
-    existing versions of the scripts in the staging area.
-
-``--always-copy, -a``
-    Copy all needed distributions to the staging area, even if they
-    are already present in another directory on ``sys.path``.  By default, if
-    a requirement can be met using a distribution that is already available in
-    a directory on ``sys.path``, it will not be copied to the staging area.
-
-``--egg-path=DIR``
-    Force the generated ``.egg-link`` file to use a specified relative path
-    to the source directory.  This can be useful in circumstances where your
-    installation directory is being shared by code running under multiple
-    platforms (e.g. Mac and Windows) which have different absolute locations
-    for the code under development, but the same *relative* locations with
-    respect to the installation directory.  If you use this option when
-    installing, you must supply the same relative path when uninstalling.
-
-In addition to the above options, the ``develop`` command also accepts all of
-the same options accepted by ``easy_install``.  If you've configured any
-``easy_install`` settings in your ``setup.cfg`` (or other distutils config
-files), the ``develop`` command will use them as defaults, unless you override
-them in a ``[develop]`` section or on the command line.
-
-
-``easy_install`` - Find and install packages
-============================================
-
-This command runs the `EasyInstall tool
-<http://peak.telecommunity.com/DevCenter/EasyInstall>`_ for you.  It is exactly
-equivalent to running the ``easy_install`` command.  All command line arguments
-following this command are consumed and not processed further by the distutils,
-so this must be the last command listed on the command line.  Please see
-the EasyInstall documentation for the options reference and usage examples.
-Normally, there is no reason to use this command via the command line, as you
-can just use ``easy_install`` directly.  It's only listed here so that you know
-it's a distutils command, which means that you can:
-
-* create command aliases that use it,
-* create distutils extensions that invoke it as a subcommand, and
-* configure options for it in your ``setup.cfg`` or other distutils config
-  files.
-
-
-.. _egg_info:
-
-``egg_info`` - Create egg metadata and set build tags
-=====================================================
-
-This command performs two operations: it updates a project's ``.egg-info``
-metadata directory (used by the ``bdist_egg``, ``develop``, and ``test``
-commands), and it allows you to temporarily change a project's version string,
-to support "daily builds" or "snapshot" releases.  It is run automatically by
-the ``sdist``, ``bdist_egg``, ``develop``, ``register``, and ``test`` commands
-in order to update the project's metadata, but you can also specify it
-explicitly in order to temporarily change the project's version string while
-executing other commands.  (It also generates the ``.egg-info/SOURCES.txt``
-manifest file, which is used when you are building source distributions.)
-
-In addition to writing the core egg metadata defined by ``setuptools`` and
-required by ``pkg_resources``, this command can be extended to write other
-metadata files as well, by defining entry points in the ``egg_info.writers``
-group.  See the section on `Adding new EGG-INFO Files`_ below for more details.
-Note that using additional metadata writers may require you to include a
-``setup_requires`` argument to ``setup()`` in order to ensure that the desired
-writers are available on ``sys.path``.
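-
-Such an entry point declaration might look roughly like this in the metadata
-writer project's setup script (the names used here are hypothetical)::
-
-    setup(
-        # ...
-        entry_points = {
-            "egg_info.writers": [
-                "foo_metadata.txt = mypackage.writers:write_foo_metadata",
-            ],
-        },
-    )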
-
-
-Release Tagging Options
------------------------
-
-The following options can be used to modify the project's version string for
-all remaining commands on the setup command line.  The options are processed
-in the order shown, so if you use more than one, the requested tags will be
-added in the following order:
-
-``--tag-build=NAME, -b NAME``
-    Append NAME to the project's version string.  Due to the way setuptools
-    processes "pre-release" version suffixes beginning with the letters "a"
-    through "e" (like "alpha", "beta", and "candidate"), you will usually want
-    to use a tag like ".build" or ".dev", as this will cause the version number
-    to be considered *lower* than the project's default version.  (If you
-    want to make the version number *higher* than the default version, you can
-    always leave off --tag-build and then use one or both of the following
-    options.)
-
-    If you have a default build tag set in your ``setup.cfg``, you can suppress
-    it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument
-    to the ``egg_info`` command.
-
-``--tag-svn-revision, -r``
-    If the current directory is a Subversion checkout (i.e. has a ``.svn``
-    subdirectory), this appends a string of the form "-rNNNN" to the project's
-    version string, where NNNN is the revision number of the most recent
-    modification to the current directory, as obtained from the ``svn info``
-    command.
-
-    If the current directory is not a Subversion checkout, the command will
-    look for a ``PKG-INFO`` file instead, and try to find the revision number
-    from that, by looking for a "-rNNNN" string at the end of the version
-    number.  (This is so that building a package from a source distribution of
-    a Subversion snapshot will produce a binary with the correct version
-    number.)
-
-    If there is no ``PKG-INFO`` file, or the version number contained therein
-    does not end with ``-r`` and a number, then ``-r0`` is used.
-
-``--no-svn-revision, -R``
-    Don't include the Subversion revision in the version number.  This option
-    is included so you can override a default setting put in ``setup.cfg``.
-
-``--tag-date, -d``
-    Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the
-    project's version number.
-
-``--no-date, -D``
-    Don't include a date stamp in the version number.  This option is included
-    so you can override a default setting in ``setup.cfg``.
-
-
-(Note: Because these options modify the version number used for source and
-binary distributions of your project, you should first make sure that you know
-how the resulting version numbers will be interpreted by automated tools
-like EasyInstall.  See the section above on `Specifying Your Project's
-Version`_ for an explanation of pre- and post-release tags, as well as tips on
-how to choose and verify a versioning scheme for your project.)
-
-For advanced uses, there is one other option that can be set, to change the
-location of the project's ``.egg-info`` directory.  Commands that need to find
-the project's source directory or metadata should get it from this setting:
-
-
-Other ``egg_info`` Options
---------------------------
-
-``--egg-base=SOURCEDIR, -e SOURCEDIR``
-    Specify the directory that should contain the .egg-info directory.  This
-    should normally be the root of your project's source tree (which is not
-    necessarily the same as your project directory; some projects use a ``src``
-    or ``lib`` subdirectory as the source root).  You should not normally need
-    to specify this directory, as it is normally determined from the
-    ``package_dir`` argument to the ``setup()`` function, if any.  If there is
-    no ``package_dir`` set, this option defaults to the current directory.
-
-
-``egg_info`` Examples
----------------------
-
-Creating a dated "nightly build" snapshot egg::
-
-    python setup.py egg_info --tag-date --tag-build=DEV bdist_egg
-
-Creating and uploading a release with no version tags, even if some default
-tags are specified in ``setup.cfg``::
-
-    python setup.py egg_info -RDb "" sdist bdist_egg register upload
-
-(Notice that ``egg_info`` must always appear on the command line *before* any
-commands that you want the version changes to apply to.)
-
-
-.. _install command:
-
-``install`` - Run ``easy_install`` or old-style installation
-============================================================
-
-The setuptools ``install`` command is basically a shortcut to run the
-``easy_install`` command on the current project.  However, for convenience
-in creating "system packages" of setuptools-based projects, you can also
-use this option:
-
-``--single-version-externally-managed``
-    This boolean option tells the ``install`` command to perform an "old style"
-    installation, with the addition of an ``.egg-info`` directory so that the
-    installed project will still have its metadata available and operate
-    normally.  If you use this option, you *must* also specify the ``--root``
-    or ``--record`` options (or both), because otherwise you will have no way
-    to identify and remove the installed files.
-
-This option is automatically in effect when ``install`` is invoked by another
-distutils command, so that commands like ``bdist_wininst`` and ``bdist_rpm``
-will create system packages of eggs.  It is also automatically in effect if
-you specify the ``--root`` option.
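-
-A system packaging tool might therefore invoke something along these lines (the
-paths shown are only placeholders)::
-
-    python setup.py install --single-version-externally-managed \
-        --root=/tmp/buildroot --record=installed-files.txt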
-
-
-``install_egg_info`` - Install an ``.egg-info`` directory in ``site-packages``
-==============================================================================
-
-Setuptools runs this command as part of ``install`` operations that use the
-``--single-version-externally-managed`` option.  You should not invoke it
-directly; it is documented here for completeness and so that distutils
-extensions such as system package builders can make use of it.  This command
-has only one option:
-
-``--install-dir=DIR, -d DIR``
-    The parent directory where the ``.egg-info`` directory will be placed.
-    Defaults to the same as the ``--install-dir`` option specified for the
-    ``install_lib`` command, which is usually the system ``site-packages``
-    directory.
-
-This command assumes that the ``egg_info`` command has been given valid options
-via the command line or ``setup.cfg``, as it will invoke the ``egg_info``
-command and use its options to locate the project's source ``.egg-info``
-directory.
-
-
-.. _rotate:
-
-``rotate`` - Delete outdated distribution files
-===============================================
-
-As you develop new versions of your project, your distribution (``dist``)
-directory will gradually fill up with older source and/or binary distribution
-files.  The ``rotate`` command lets you automatically clean these up, keeping
-only the N most-recently modified files matching a given pattern.
-
-``--match=PATTERNLIST, -m PATTERNLIST``
-    Comma-separated list of glob patterns to match.  This option is *required*.
-    The project name and ``-*`` are prepended to the supplied patterns, in order
-    to match only distributions belonging to the current project (in case you
-    have a shared distribution directory for multiple projects).  Typically,
-    you will use a glob pattern like ``.zip`` or ``.egg`` to match files of
-    the specified type.  Note that each supplied pattern is treated as a
-    distinct group of files for purposes of selecting files to delete.
-
-``--keep=COUNT, -k COUNT``
-    Number of matching distributions to keep.  For each group of files
-    identified by a pattern specified with the ``--match`` option, delete all
-    but the COUNT most-recently-modified files in that group.  This option is
-    *required*.
-
-``--dist-dir=DIR, -d DIR``
-    Directory where the distributions are.  This defaults to the value of the
-    ``bdist`` command's ``--dist-dir`` option, which will usually be the
-    project's ``dist`` subdirectory.
-
-**Example 1**: Delete all .tar.gz files from the distribution directory, except
-for the 3 most recently modified ones::
-
-    setup.py rotate --match=.tar.gz --keep=3
-
-**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution
-directory, except the most recently modified one for each Python version::
-
-    setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1
-
-
-.. _saveopts:
-
-``saveopts`` - Save used options to a configuration file
-========================================================
-
-Finding and editing ``distutils`` configuration files can be a pain, especially
-since you also have to translate the configuration options from command-line
-form to the proper configuration file format.  You can avoid these hassles by
-using the ``saveopts`` command.  Just add it to the command line to save the
-options you used.  For example, this command builds the project using
-the ``mingw32`` C compiler, then saves the --compiler setting as the default
-for future builds (even those run implicitly by the ``install`` command)::
-
-    setup.py build --compiler=mingw32 saveopts
-
-The ``saveopts`` command saves all options for every command specified on the
-command line to the project's local ``setup.cfg`` file, unless you use one of
-the `configuration file options`_ to change where the options are saved.  For
-example, this command does the same as above, but saves the compiler setting
-to the site-wide (global) distutils configuration::
-
-    setup.py build --compiler=mingw32 saveopts -g
-
-Note that it doesn't matter where you place the ``saveopts`` command on the
-command line; it will still save all the options specified for all commands.
-For example, this is another valid way to spell the last example::
-
-    setup.py saveopts -g build --compiler=mingw32
-
-Note, however, that all of the commands specified are always run, regardless of
-where ``saveopts`` is placed on the command line.
-
-
-Configuration File Options
---------------------------
-
-Normally, settings such as options and aliases are saved to the project's
-local ``setup.cfg`` file.  But you can override this and save them to the
-global or per-user configuration files, or to a manually-specified filename.
-
-``--global-config, -g``
-    Save settings to the global ``distutils.cfg`` file inside the ``distutils``
-    package directory.  You must have write access to that directory to use
-    this option.  You also can't combine this option with ``-u`` or ``-f``.
-
-``--user-config, -u``
-    Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or
-    ``$HOME/pydistutils.cfg`` (Windows) file.  You can't combine this option
-    with ``-g`` or ``-f``.
-
-``--filename=FILENAME, -f FILENAME``
-    Save settings to the specified configuration file.  You can't
-    combine this option with ``-g`` or ``-u``.  Note that if you specify a
-    non-standard filename, the ``distutils`` and ``setuptools`` will not
-    use the file's contents.  This option is mainly included for use in
-    testing.
-
-These options are used by other ``setuptools`` commands that modify
-configuration files, such as the `alias`_ and `setopt`_ commands.
-
-
-.. _setopt:
-
-``setopt`` - Set a distutils or setuptools option in a config file
-==================================================================
-
-This command is mainly for use by scripts, but it can also be used as a quick
-and dirty way to change a distutils configuration option without having to
-remember what file the options are in and then open an editor.
-
-**Example 1**.  Set the default C compiler to ``mingw32`` (using long option
-names)::
-
-    setup.py setopt --command=build --option=compiler --set-value=mingw32
-
-**Example 2**.  Remove any setting for the distutils default package
-installation directory (short option names)::
-
-    setup.py setopt -c install -o install_lib -r
-
-
-Options for the ``setopt`` command:
-
-``--command=COMMAND, -c COMMAND``
-    Command to set the option for.  This option is required.
-
-``--option=OPTION, -o OPTION``
-    The name of the option to set.  This option is required.
-
-``--set-value=VALUE, -s VALUE``
-    The value to set the option to.  Not needed if ``-r`` or ``--remove`` is
-    set.
-
-``--remove, -r``
-    Remove (unset) the option, instead of setting it.
-
-In addition to the above options, you may use any of the `configuration file
-options`_ (listed under the `saveopts`_ command, above) to determine which
-distutils configuration file the option will be added to (or removed from).
-
-
-.. _test:
-
-``test`` - Build package and run a unittest suite
-=================================================
-
-When doing test-driven development, or running automated builds that need
-testing before they are deployed for downloading or use, it's often useful
-to be able to run a project's unit tests without actually deploying the project
-anywhere, even using the ``develop`` command.  The ``test`` command runs a
-project's unit tests without actually deploying it, by temporarily putting the
-project's source on ``sys.path``, after first running ``build_ext -i`` and
-``egg_info`` to ensure that any C extensions and project metadata are
-up-to-date.
-
-To use this command, your project's tests must be wrapped in a ``unittest``
-test suite by either a function, a ``TestCase`` class or method, or a module
-or package containing ``TestCase`` classes.  If the named suite is a module,
-and the module has an ``additional_tests()`` function, it is called and the
-result (which must be a ``unittest.TestSuite``) is added to the tests to be
-run.  If the named suite is a package, any submodules and subpackages are
-recursively added to the overall test suite.  (Note: if your project specifies
-a ``test_loader``, the rules for processing the chosen ``test_suite`` may
-differ; see the `test_loader`_ documentation for more details.)
-
-Note that many test systems including ``doctest`` support wrapping their
-non-``unittest`` tests in ``TestSuite`` objects.  So, if you are using a test
-package that does not support this, we suggest you encourage its developers to
-implement test suite support, as this is a convenient and standard way to
-aggregate a collection of tests to be run under a common test harness.
-
-By default, tests will be run in the "verbose" mode of the ``unittest``
-package's text test runner, but you can get the "quiet" mode (just dots) if
-you supply the ``-q`` or ``--quiet`` option, either as a global option to
-the setup script (e.g. ``setup.py -q test``) or as an option for the ``test``
-command itself (e.g. ``setup.py test -q``).  There is one other option
-available:
-
-``--test-suite=NAME, -s NAME``
-    Specify the test suite (or module, class, or method) to be run
-    (e.g. ``some_module.test_suite``).  The default for this option can be
-    set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.::
-
-        setup(
-            # ...
-            test_suite = "my_package.tests.test_all"
-        )
-
-    If you did not set a ``test_suite`` in your ``setup()`` call, and do not
-    provide a ``--test-suite`` option, an error will occur.
-
-
-.. _upload:
-
-``upload`` - Upload source and/or egg distributions to PyPI
-===========================================================
-
-PyPI now supports uploading project files for redistribution; uploaded files
-are easily found by EasyInstall, even if you don't have download links on your
-project's home page.
-
-Although Python 2.5 will support uploading all types of distributions to PyPI,
-setuptools only supports source distributions and eggs.  (This is partly
-because PyPI's upload support is currently broken for various other file
-types.)  To upload files, you must include the ``upload`` command *after* the
-``sdist`` or ``bdist_egg`` commands on the setup command line.  For example::
-
-    setup.py bdist_egg upload         # create an egg and upload it
-    setup.py sdist upload             # create a source distro and upload it
-    setup.py sdist bdist_egg upload   # create and upload both
-
-Note that to upload files for a project, the corresponding version must already
-be registered with PyPI, using the distutils ``register`` command.  It's
-usually a good idea to include the ``register`` command at the start of the
-command line, so that any registration problems can be found and fixed before
-building and uploading the distributions, e.g.::
-
-    setup.py register sdist bdist_egg upload
-
-This will update PyPI's listing for your project's current version.
-
-Note, by the way, that the metadata in your ``setup()`` call determines what
-will be listed in PyPI for your package.  Try to fill out as much of it as
-possible, as it will save you a lot of trouble manually adding and updating
-your PyPI listings.  Just put it in ``setup.py`` and use the ``register``
-command to keep PyPI up to date.
-
-The ``upload`` command has a few options worth noting:
-
-``--sign, -s``
-    Sign each uploaded file using GPG (GNU Privacy Guard).  The ``gpg`` program
-    must be available for execution on the system ``PATH``.
-
-``--identity=NAME, -i NAME``
-    Specify the identity or key name for GPG to use when signing.  The value of
-    this option will be passed through the ``--local-user`` option of the
-    ``gpg`` program.  (See the example following this list.)
-
-``--show-response``
-    Display the full response text from the server; this is useful for debugging
-    PyPI problems.
-
-``--repository=URL, -r URL``
-    The URL of the repository to upload to.  Defaults to
-    http://pypi.python.org/pypi (i.e., the main PyPI installation).
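-
-For example, to create, sign, and upload a source distribution in a single
-step, one might run something like this (substituting your own GPG identity)::
-
-    setup.py sdist upload --sign --identity="Your Name"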
-
-.. _upload_docs:
-
-``upload_docs`` - Upload package documentation to PyPI
-======================================================
-
-PyPI now supports uploading project documentation to the dedicated URL
-http://packages.python.org/<project>/.
-
-The ``upload_docs`` command will create the necessary zip file out of a
-documentation directory and will post to the repository.
-
-Note that to upload the documentation of a project, the corresponding version
-must already be registered with PyPI, using the distutils ``register``
-command -- just like the ``upload`` command.
-
-Assuming there is an ``Example`` project with documentation in the
-subdirectory ``docs``, e.g.::
-
-  Example/
-  |-- example.py
-  |-- setup.cfg
-  |-- setup.py
-  |-- docs
-  |   |-- build
-  |   |   `-- html
-  |   |       |-- index.html
-  |   |       `-- tips_tricks.html
-  |   |-- conf.py
-  |   |-- index.txt
-  |   `-- tips_tricks.txt
-
-You can simply pass the documentation directory path to the ``upload_docs``
-command::
-
-    python setup.py upload_docs --upload-dir=docs/build/html
-
-If no ``--upload-dir`` is given, ``upload_docs`` will attempt to run the
-``build_sphinx`` command to generate uploadable documentation.
-For the command to become available, `Sphinx <http://sphinx.pocoo.org/>`_
-must be installed in the same environment as distribute.
-
-As with other ``setuptools``-based commands, you can define useful
-defaults in the ``setup.cfg`` of your Python project, e.g.:
-
-.. code-block:: ini
-
-    [upload_docs]
-    upload-dir = docs/build/html
-
-The ``upload_docs`` command has the following options:
-
-``--upload-dir``
-    The directory to be uploaded to the repository.
-
-``--show-response``
-    Display the full response text from the server; this is useful for debugging
-    PyPI problems.
-
-``--repository=URL, -r URL``
-    The URL of the repository to upload to.  Defaults to
-    http://pypi.python.org/pypi (i.e., the main PyPI installation).
-
-
---------------------------------
-Extending and Reusing Distribute
---------------------------------
-
-Creating ``distutils`` Extensions
-=================================
-
-It can be hard to add new commands or setup arguments to the distutils.  But
-the ``setuptools`` package makes it a bit easier, by allowing you to distribute
-a distutils extension as a separate project, and then have projects that need
-the extension just refer to it in their ``setup_requires`` argument.
-
-With ``setuptools``, your distutils extension projects can hook in new
-commands and ``setup()`` arguments just by defining "entry points".  These
-are mappings from command or argument names to a specification of where to
-import a handler from.  (See the section on `Dynamic Discovery of Services and
-Plugins`_ above for some more background on entry points.)
-
-
-Adding Commands
----------------
-
-You can add new ``setup`` commands by defining entry points in the
-``distutils.commands`` group.  For example, if you wanted to add a ``foo``
-command, you might add something like this to your distutils extension
-project's setup script::
-
-    setup(
-        # ...
-        entry_points = {
-            "distutils.commands": [
-                "foo = mypackage.some_module:foo",
-            ],
-        },
-    )
-
-(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
-a ``setuptools.Command`` subclass.)
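-
-Such a class only needs to implement the standard distutils command interface.
-A minimal, do-nothing sketch (everything beyond the required attribute and
-method names is illustrative)::
-
-    from setuptools import Command
-
-    class foo(Command):
-        """An example command that doesn't do anything useful."""
-
-        description = "demonstrate a command added via an entry point"
-        user_options = []       # no command-line options of its own
-
-        def initialize_options(self):
-            pass
-
-        def finalize_options(self):
-            pass
-
-        def run(self):
-            # a real command would do its work here
-            self.announce("Hello from the foo command!")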
-
-Once a project containing such entry points has been activated on ``sys.path``
-(e.g. by running "install" or "develop" with a site-packages installation
-directory), the command(s) will be available to any ``setuptools``-based setup
-scripts.  It is not necessary to use the ``--command-packages`` option or
-to monkeypatch the ``distutils.command`` package to install your commands;
-``setuptools`` automatically adds a wrapper to the distutils to search for
-entry points in the active distributions on ``sys.path``.  In fact, this is
-how setuptools' own commands are installed: the setuptools project's setup
-script defines entry points for them!
-
-
-Adding ``setup()`` Arguments
-----------------------------
-
-Sometimes, your commands may need additional arguments to the ``setup()``
-call.  You can enable this by defining entry points in the
-``distutils.setup_keywords`` group.  For example, if you wanted a ``setup()``
-argument called ``bar_baz``, you might add something like this to your
-distutils extension project's setup script::
-
-    setup(
-        # ...
-        entry_points = {
-            "distutils.commands": [
-                "foo = mypackage.some_module:foo",
-            ],
-            "distutils.setup_keywords": [
-                "bar_baz = mypackage.some_module:validate_bar_baz",
-            ],
-        },
-    )
-
-The idea here is that the entry point defines a function that will be called
-to validate the ``setup()`` argument, if it's supplied.  The ``Distribution``
-object will have the initial value of the attribute set to ``None``, and the
-validation function will only be called if the ``setup()`` call sets it to
-a non-None value.  Here's an example validation function::
-
-    def assert_bool(dist, attr, value):
-        """Verify that value is True, False, 0, or 1"""
-        if bool(value) != value:
-            raise DistutilsSetupError(
-                "%r must be a boolean value (got %r)" % (attr,value)
-            )
-
-Your function should accept three arguments: the ``Distribution`` object,
-the attribute name, and the attribute value.  It should raise a
-``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument
-is invalid.  Remember, your function will only be called with non-None values,
-and the default value of arguments defined this way is always None.  So, your
-commands should always be prepared for the possibility that the attribute will
-be ``None`` when they access it later.
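-
-For instance, a command's ``run()`` method might read the keyword defensively
-(a minimal sketch; the empty-list fallback is simply an assumed default)::
-
-    def run(self):
-        # the attribute always exists on the Distribution, but may be None
-        bar_baz = getattr(self.distribution, 'bar_baz', None)
-        if bar_baz is None:
-            bar_baz = []    # fall back to some sensible default
-        # ... make use of bar_baz ...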
-
-If more than one active distribution defines an entry point for the same
-``setup()`` argument, *all* of them will be called.  This allows multiple
-distutils extensions to define a common argument, as long as they agree on
-what values of that argument are valid.
-
-Also note that as with commands, it is not necessary to subclass or monkeypatch
-the distutils ``Distribution`` class in order to add your arguments; it is
-sufficient to define the entry points in your extension, as long as any setup
-script using your extension lists your project in its ``setup_requires``
-argument.
-
-
-Adding new EGG-INFO Files
--------------------------
-
-Some extensible applications or frameworks may want to allow third parties to
-develop plugins with application or framework-specific metadata included in
-the plugins' EGG-INFO directory, for easy access via the ``pkg_resources``
-metadata API.  The easiest way to allow this is to create a distutils extension
-to be used from the plugin projects' setup scripts (via ``setup_requires``)
-that defines a new setup keyword, and then uses that data to write an EGG-INFO
-file when the ``egg_info`` command is run.
-
-The ``egg_info`` command looks for extension points in an ``egg_info.writers``
-group, and calls them to write the files.  Here's a simple example of a
-distutils extension defining a setup argument ``foo_bar``, which is a list of
-lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any
-project that uses the argument::
-
-    setup(
-        # ...
-        entry_points = {
-            "distutils.setup_keywords": [
-                "foo_bar = setuptools.dist:assert_string_list",
-            ],
-            "egg_info.writers": [
-                "foo_bar.txt = setuptools.command.egg_info:write_arg",
-            ],
-        },
-    )
-
-This simple example makes use of two utility functions defined by setuptools
-for its own use: a routine to validate that a setup keyword is a sequence of
-strings, and another one that looks up a setup argument and writes it to
-a file.  Here's what the writer utility looks like::
-
-    def write_arg(cmd, basename, filename):
-        argname = os.path.splitext(basename)[0]
-        value = getattr(cmd.distribution, argname, None)
-        if value is not None:
-            value = '\n'.join(value)+'\n'
-        cmd.write_or_delete_file(argname, filename, value)
-
-As you can see, an ``egg_info.writers`` entry point must be a function taking
-three arguments: an ``egg_info`` command instance, the basename of the file to
-write (e.g. ``foo_bar.txt``), and the actual full filename that should be
-written to.
-
-In general, writer functions should honor the command object's ``dry_run``
-setting when writing files, and use the ``distutils.log`` object to do any
-console output.  The easiest way to conform to this requirement is to use
-the ``cmd`` object's ``write_file()``, ``delete_file()``, and
-``write_or_delete_file()`` methods exclusively for your file operations.  See
-those methods' docstrings for more details.
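-
-At runtime, a plugin host can read such metadata back via the ``pkg_resources``
-metadata API mentioned above.  A rough sketch (the helper name is made up)::
-
-    import pkg_resources
-
-    def iter_foo_bar_lines():
-        # scan every active distribution for a foo_bar.txt metadata file
-        for dist in pkg_resources.working_set:
-            if dist.has_metadata('foo_bar.txt'):
-                for line in dist.get_metadata_lines('foo_bar.txt'):
-                    yield dist.project_name, line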
-
-
-Adding Support for Other Revision Control Systems
--------------------------------------------------
-
-If you would like to create a plugin for ``setuptools`` to find files in other
-source control systems besides CVS and Subversion, you can do so by adding an
-entry point to the ``setuptools.file_finders`` group.  The entry point should
-be a function accepting a single directory name, and should yield
-all the filenames within that directory (and any subdirectories thereof) that
-are under revision control.
-
-For example, if you were going to create a plugin for a revision control system
-called "foobar", you would write a function something like this:
-
-.. code-block:: python
-
-    def find_files_for_foobar(dirname):
-        # loop to yield paths that start with `dirname`
-
-And you would register it in a setup script using something like this::
-
-    entry_points = {
-        "setuptools.file_finders": [
-            "foobar = my_foobar_module:find_files_for_foobar"
-        ]
-    }
-
-Then, anyone who wants to use your plugin can simply install it, and their
-local setuptools installation will be able to find the necessary files.
-
-It is not necessary to distribute source control plugins with projects that
-simply use the other source control system, or to specify the plugins in
-``setup_requires``.  When you create a source distribution with the ``sdist``
-command, setuptools automatically records what files were found in the
-``SOURCES.txt`` file.  That way, recipients of source distributions don't need
-to have revision control at all.  However, if someone is working on a package
-by checking out with that system, they will need the same plugin(s) that the
-original author is using.
-
-A few important points for writing revision control file finders:
-
-* Your finder function MUST return relative paths, created by appending to the
-  passed-in directory name.  Absolute paths are NOT allowed, nor are relative
-  paths that reference a parent directory of the passed-in directory.
-
-* Your finder function MUST accept an empty string as the directory name,
-  meaning the current directory.  You MUST NOT convert this to a dot; just
-  yield relative paths.  So, yielding a subdirectory named ``some/dir`` under
-  the current directory should NOT be rendered as ``./some/dir`` or
-  ``/somewhere/some/dir``, but *always* as simply ``some/dir``
-
-* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
-  with the absence of needed programs (i.e., ones belonging to the revision
-  control system itself).  It *may*, however, use ``distutils.log.warn()`` to
-  inform the user of the missing program(s), as in the sketch below.
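-
-Putting these points together, a finder for the hypothetical "foobar" system
-might look roughly like the following sketch (the ``foobar list-files``
-invocation is invented, and ``subprocess.check_output`` assumes Python 2.7)::
-
-    import os
-    import subprocess
-    from distutils import log
-
-    def find_files_for_foobar(dirname):
-        # ask the (hypothetical) foobar tool which files it tracks
-        try:
-            output = subprocess.check_output(
-                ['foobar', 'list-files'], cwd=dirname or '.')
-        except (OSError, subprocess.CalledProcessError):
-            log.warn("foobar unavailable; not listing foobar-controlled files")
-            return
-        for name in output.splitlines():
-            # names come back relative to dirname; re-join them, and never
-            # prefix "./" -- note os.path.join('', 'some/dir') == 'some/dir'
-            yield os.path.join(dirname, name)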
-
-
-Subclassing ``Command``
------------------------
-
-Sorry, this section isn't written yet, and neither is a lot of what's below
-this point, except for the change log.  You might want to `subscribe to changes
-in this page <setuptools?action=subscribe>`_ to see when new documentation is
-added or updated.
-
-XXX
-
-
-Reusing ``setuptools`` Code
-===========================
-
-``distribute_setup``
---------------------
-
-XXX
-
-
-``setuptools.archive_util``
----------------------------
-
-XXX
-
-
-``setuptools.sandbox``
-----------------------
-
-XXX
-
-
-``setuptools.package_index``
-----------------------------
-
-XXX
-
-History
-=======
-
-0.6c9
- * Fixed a missing files problem when using Windows source distributions on
-   non-Windows platforms, due to distutils not handling manifest file line
-   endings correctly.
-
- * Updated Pyrex support to work with Pyrex 0.9.6 and higher.
-
- * Minor changes for Jython compatibility, including skipping tests that can't
-   work on Jython.
-
- * Fixed not installing eggs in ``install_requires`` if they were also used for
-   ``setup_requires`` or ``tests_require``.
-
- * Fixed not fetching eggs in ``install_requires`` when running tests.
-
- * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools
-   installations when called from a standalone ``setup.py``.
-
- * Added a warning if a namespace package is declared, but its parent package
-   is not also declared as a namespace.
-
- * Support Subversion 1.5
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice
-
- * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's
-   ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``.
-
- * Ensure that _full_name is set on all shared libs before extensions are
-   checked for shared lib usage.  (Fixes a bug in the experimental shared
-   library build support.)
-
- * Fix to allow unpacked eggs containing native libraries to fail more
-   gracefully under Google App Engine (with an ``ImportError`` loading the
-   C-based module, instead of getting a ``NameError``).
-
-0.6c7
- * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and
-   ``egg_info`` command failing on new, uncommitted SVN directories.
-
- * Fix import problems with nested namespace packages installed via
-   ``--root`` or ``--single-version-externally-managed``, due to the
-   parent package not having the child package as an attribute.
-
-0.6c6
- * Added ``--egg-path`` option to ``develop`` command, allowing you to force
-   ``.egg-link`` files to use relative paths (allowing them to be shared across
-   platforms on a networked drive).
-
- * Fix not building binary RPMs correctly.
-
- * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with
-   bash-compatible shells.
-
- * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there
-   was whitespace inside a quoted argument or at the end of the ``#!`` line
-   (a regression introduced in 0.6c4).
-
- * Fix ``test`` command possibly failing if an older version of the project
-   being tested was installed on ``sys.path`` ahead of the test source
-   directory.
-
- * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in
-   their names as packages.
-
-0.6c5
- * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg``
-   packages under Python versions less than 2.5.
-
- * Fix uploaded ``bdist_wininst`` packages being described as suitable for
-   "any" version by Python 2.5, even if a ``--target-version`` was specified.
-
-0.6c4
- * Overhauled Windows script wrapping to support ``bdist_wininst`` better.
-   Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or
-   ``#!pythonw.exe`` as the executable name (even when built on non-Windows
-   platforms!), and the wrappers will look for the executable in the script's
-   parent directory (which should find the right version of Python).
-
- * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or
-   ``bdist_wininst`` under Python 2.3 and 2.4.
-
- * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is
-   prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish
-   platforms.  (This is mainly so that setuptools itself can have a single-file
-   installer on Unix, without doing multiple downloads, dealing with firewalls,
-   etc.)
-
- * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files
-
- * Use cross-platform relative paths in ``easy-install.pth`` when doing
-   ``develop`` and the source directory is a subdirectory of the installation
-   target directory.
-
- * Fix a problem installing eggs with a system packaging tool if the project
-   contained an implicit namespace package; for example if the ``setup()``
-   listed a namespace package ``foo.bar`` without explicitly listing ``foo``
-   as a namespace package.
-
-0.6c3
- * Fixed breakages caused by Subversion 1.4's new "working copy" format
-
-0.6c2
- * The ``ez_setup`` module displays the conflicting version of setuptools (and
-   its installation location) when a script requests a version that's not
-   available.
-
- * Running ``setup.py develop`` on a setuptools-using project will now install
-   setuptools if needed, instead of only downloading the egg.
-
-0.6c1
- * Fixed ``AttributeError`` when trying to download a ``setup_requires``
-   dependency when a distribution lacks a ``dependency_links`` setting.
-
- * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so
-   as to play better with packaging tools that complain about zero-length
-   files.
-
- * Made ``setup.py develop`` respect the ``--no-deps`` option, which it
-   previously was ignoring.
-
- * Support ``extra_path`` option to ``setup()`` when ``install`` is run in
-   backward-compatibility mode.
-
- * Source distributions now always include a ``setup.cfg`` file that explicitly
-   sets ``egg_info`` options such that they produce an identical version number
-   to the source distribution's version number.  (Previously, the default
-   version number could be different due to the use of ``--tag-date``, or if
-   the version was overridden on the command line that built the source
-   distribution.)
-
-0.6b4
- * Fix ``register`` not obeying name/version set by ``egg_info`` command, if
-   ``egg_info`` wasn't explicitly run first on the same command line.
-
- * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info``
-   command, to allow suppressing tags configured in ``setup.cfg``.
-
- * Fixed redundant warnings about missing ``README`` file(s); the warning
-   should now appear only when actually building a source distribution.
-
-0.6b3
- * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``.
-
- * Allow ``.py`` files found by the ``include_package_data`` option to be
-   automatically included.  Remove duplicate data file matches if both
-   ``include_package_data`` and ``package_data`` are used to refer to the same
-   files.
-
-0.6b1
- * Strip ``module`` from the end of compiled extension modules when computing
-   the name of a ``.py`` loader/wrapper.  (Python's import machinery ignores
-   this suffix when searching for an extension module.)
-
-0.6a11
- * Added ``test_loader`` keyword to support custom test loaders
-
- * Added ``setuptools.file_finders`` entry point group to allow implementing
-   revision control plugins.
-
- * Added ``--identity`` option to ``upload`` command.
-
- * Added ``dependency_links`` to allow specifying URLs for ``--find-links``.
-
- * Enhanced test loader to scan packages as well as modules, and call
-   ``additional_tests()`` if present to get non-unittest tests.
-
- * Support namespace packages in conjunction with system packagers, by omitting
-   the installation of any ``__init__.py`` files for namespace packages, and
-   adding a special ``.pth`` file to create a working package in
-   ``sys.modules``.
-
- * Made ``--single-version-externally-managed`` automatic when ``--root`` is
-   used, so that most system packagers won't require special support for
-   setuptools.
-
- * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or
-   other configuration files for their option defaults when installing, and
-   also made the install use ``--multi-version`` mode so that the project
-   directory doesn't need to support .pth files.
-
- * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading
-   it.  Previously, the file could be left open and the actual error would be
-   masked by problems trying to remove the open file on Windows systems.
-
-0.6a10
- * Fixed the ``develop`` command ignoring ``--find-links``.
-
-0.6a9
- * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to
-   create source distributions.  ``MANIFEST.in`` is still read and processed,
-   as are the standard defaults and pruning.  But the manifest is built inside
-   the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt
-   every time the ``egg_info`` command is run.
-
- * Added the ``include_package_data`` keyword to ``setup()``, allowing you to
-   automatically include any package data listed in revision control or
-   ``MANIFEST.in``
-
- * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to
-   trim back files included via the ``package_data`` and
-   ``include_package_data`` options.
-
- * Fixed ``--tag-svn-revision`` not working when run from a source
-   distribution.
-
- * Added warning for namespace packages with missing ``declare_namespace()``
-
- * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages
-   requiring ``nose`` to run unit tests can make this dependency optional
-   unless the ``test`` command is run.
-
- * Made all commands that use ``easy_install`` respect its configuration
-   options, as this was causing some problems with ``setup.py install``.
-
- * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so
-   that you can process a directory tree through a processing filter as if it
-   were a zipfile or tarfile.
-
- * Added an internal ``install_egg_info`` command to use as part of old-style
-   ``install`` operations, that installs an ``.egg-info`` directory with the
-   package.
-
- * Added a ``--single-version-externally-managed`` option to the ``install``
-   command so that you can more easily wrap a "flat" egg in a system package.
-
- * Enhanced ``bdist_rpm`` so that it installs single-version eggs that
-   don't rely on a ``.pth`` file.  The ``--no-egg`` option has been removed,
-   since all RPMs are now built in a more backwards-compatible format.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
-   format.  Running ``bdist_wininst`` on a setuptools-based package wraps the
-   egg in an .exe that will safely install it as an egg (i.e., with metadata
-   and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
-   back into an ``.egg`` file or directory and install it as such.
-
-
-0.6a8
- * Fixed some problems building extensions when Pyrex was installed, especially
-   with Python 2.4 and/or packages using SWIG.
-
- * Made ``develop`` command accept all the same options as ``easy_install``,
-   and use the ``easy_install`` command's configuration settings as defaults.
-
- * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision
-   number from ``PKG-INFO`` in case it is being run on a source distribution of
-   a snapshot taken from a Subversion-based project.
-
- * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being
-   installed as data, adding them to ``native_libs.txt`` automatically.
-
- * Fixed some problems with fresh checkouts of projects that don't include
-   ``.egg-info/PKG-INFO`` under revision control and put the project's source
-   code directly in the project directory.  If such a package had any
-   requirements that get processed before the ``egg_info`` command can be run,
-   the setup scripts would fail with a "Missing 'Version:' header and/or
-   PKG-INFO file" error, because the egg runtime interpreted the unbuilt
-   metadata in a directory on ``sys.path`` (i.e. the current directory) as
-   being a corrupted egg.  Setuptools now monkeypatches the distribution
-   metadata cache to pretend that the egg has valid version information, until
-   it has a chance to make it actually be so (via the ``egg_info`` command).
-
-0.6a5
- * Fixed missing gui/cli .exe files in distribution.  Fixed bugs in tests.
-
-0.6a3
- * Added ``gui_scripts`` entry point group to allow installing GUI scripts
-   on Windows and other platforms.  (The special handling is only for Windows;
-   other platforms are treated the same as for ``console_scripts``.)
-
-0.6a2
- * Added ``console_scripts`` entry point group to allow installing scripts
-   without the need to create separate script files.  On Windows, console
-   scripts get an ``.exe`` wrapper so you can just type their name.  On other
-   platforms, the scripts are written without a file extension.
-
-0.6a1
- * Added support for building "old-style" RPMs that don't install an egg for
-   the target package, using a ``--no-egg`` option.
-
- * The ``build_ext`` command now works better when using the ``--inplace``
-   option and multiple Python versions.  It now makes sure that all extensions
-   match the current Python version, even if newer copies were built for a
-   different Python version.
-
- * The ``upload`` command no longer attaches an extra ``.zip`` when uploading
-   eggs, as PyPI now supports egg uploads without trickery.
-
- * The ``ez_setup`` script/module now displays a warning before downloading
-   the setuptools egg, and attempts to check the downloaded egg against an
-   internal MD5 checksum table.
-
- * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the
-   latest revision number; it was using the revision number of the directory
-   containing ``setup.py``, not the highest revision number in the project.
-
- * Added ``eager_resources`` setup argument
-
- * The ``sdist`` command now recognizes Subversion "deleted file" entries and
-   does not include them in source distributions.
-
- * ``setuptools`` now embeds itself more thoroughly into the distutils, so that
-   other distutils extensions (e.g. py2exe, py2app) will subclass setuptools'
-   versions of things, rather than the native distutils ones.
-
- * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``;
-   ``setup_requires`` allows you to automatically find and download packages
-   that are needed in order to *build* your project (as opposed to running it).
-
- * ``setuptools`` now finds its commands, ``setup()`` argument validators, and
-   metadata writers using entry points, so that they can be extended by
-   third-party packages.  See `Creating distutils Extensions`_ above for more
-   details.
-
- * The vestigial ``depends`` command has been removed.  It was never finished
-   or documented, and never would have worked without EasyInstall - which it
-   pre-dated and was never compatible with.
-
-0.5a12
- * The zip-safety scanner now checks for modules that might be used with
-   ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't
-   handle ``-m`` on zipped modules.
-
-0.5a11
- * Fix breakage of the "develop" command that was caused by the addition of
-   ``--always-unzip`` to the ``easy_install`` command.
-
-0.5a9
- * Include ``svn:externals`` directories in source distributions as well as
-   normal subversion-controlled files and directories.
-
- * Added ``exclude=patternlist`` option to ``setuptools.find_packages()``
-
- * Changed --tag-svn-revision to include an "r" in front of the revision number
-   for better readability.
-
- * Added ability to build eggs without including source files (except for any
-   scripts, of course), using the ``--exclude-source-files`` option to
-   ``bdist_egg``.
-
- * ``setup.py install`` now automatically detects when an "unmanaged" package
-   or module is going to be on ``sys.path`` ahead of a package being installed,
-   thereby preventing the newer version from being imported.  If this occurs,
-   a warning message is output to ``sys.stderr``, but installation proceeds
-   anyway.  The warning message informs the user what files or directories
-   need deleting, and advises them they can also use EasyInstall (with the
-   ``--delete-conflicting`` option) to do it automatically.
-
- * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata
-   directory that lists all top-level modules and packages in the distribution.
-   This is used by the ``easy_install`` command to find possibly-conflicting
-   "unmanaged" packages when installing the distribution.
-
- * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``.
-   Added package analysis to determine zip-safety if the ``zip_safe`` flag
-   is not given, and advise the author regarding what code might need changing.
-
- * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``.
-
-0.5a8
- * The "egg_info" command now always sets the distribution metadata to "safe"
-   forms of the distribution name and version, so that distribution files will
-   be generated with parseable names (i.e., ones that don't include '-' in the
-   name or version).  Also, this means that if you use the various ``--tag``
-   options of "egg_info", any distributions generated will use the tags in the
-   version, not just egg distributions.
-
- * Added support for defining command aliases in distutils configuration files,
-   under the "[aliases]" section.  To prevent recursion and to allow aliases to
-   call the command of the same name, a given alias can be expanded only once
-   per command-line invocation.  You can define new aliases with the "alias"
-   command, either for the local, global, or per-user configuration.
-
- * Added "rotate" command to delete old distribution files, given a set of
-   patterns to match and the number of files to keep.  (Keeps the most
-   recently-modified distribution files matching each pattern.)
-
- * Added "saveopts" command that saves all command-line options for the current
-   invocation to the local, global, or per-user configuration file.  Useful for
-   setting defaults without having to hand-edit a configuration file.
-
- * Added a "setopt" command that sets a single option in a specified distutils
-   configuration file.
-
-0.5a7
- * Added "upload" support for egg and source distributions, including a bug
-   fix for "upload" and a temporary workaround for lack of .egg support in
-   PyPI.
-
-0.5a6
- * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it
-   will include all files under revision control (CVS or Subversion) in the
-   current directory, and it will regenerate the list every time you create a
-   source distribution, not just when you tell it to.  This should make the
-   default "do what you mean" more often than the distutils' default behavior
-   did, while still retaining the old behavior in the presence of MANIFEST.in.
-
- * Fixed the "develop" command always updating .pth files, even if you
-   specified ``-n`` or ``--dry-run``.
-
- * Slightly changed the format of the generated version when you use
-   ``--tag-build`` on the "egg_info" command, so that you can make tagged
-   revisions compare *lower* than the version specified in setup.py (e.g. by
-   using ``--tag-build=dev``).
-
-0.5a5
- * Added ``develop`` command to ``setuptools``-based packages.  This command
-   installs an ``.egg-link`` pointing to the package's source directory, and
-   script wrappers that ``execfile()`` the source versions of the package's
-   scripts.  This lets you put your development checkout(s) on sys.path without
-   having to actually install them.  (To uninstall the link, use
-   ``setup.py develop --uninstall``.)
-
- * Added ``egg_info`` command to ``setuptools``-based packages.  This command
-   just creates or updates the "projectname.egg-info" directory, without
-   building an egg.  (It's used by the ``bdist_egg``, ``test``, and ``develop``
-   commands.)
-
- * Enhanced the ``test`` command so that it doesn't install the package, but
-   instead builds any C extensions in-place, updates the ``.egg-info``
-   metadata, adds the source directory to ``sys.path``, and runs the tests
-   directly on the source.  This avoids an "unmanaged" installation of the
-   package to ``site-packages`` or elsewhere.
-
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
-   the ``easy_install`` module to ``setuptools.command.easy_install``.  Note
-   that if you were importing or extending it, you must now change your imports
-   accordingly.  ``easy_install.py`` is still installed as a script, but not as
-   a module.
-
-0.5a4
- * Setup scripts using setuptools can now list their dependencies directly in
-   the setup.py file, without having to manually create a ``depends.txt`` file.
-   The ``install_requires`` and ``extras_require`` arguments to ``setup()``
-   are used to create a dependencies file automatically.  If you are manually
-   creating ``depends.txt`` right now, please switch to using these setup
-   arguments as soon as practical, because ``depends.txt`` support will be
-   removed in the 0.6 release cycle.  For documentation on the new arguments,
-   see the ``setuptools.dist.Distribution`` class.
-
- * Setup scripts using setuptools now always install using ``easy_install``
-   internally, for ease of uninstallation and upgrading.
-
-0.5a1
- * Added support for "self-installation" bootstrapping.  Packages can now
-   include ``ez_setup.py`` in their source distribution, and add the following
-   to their ``setup.py``, in order to automatically bootstrap installation of
-   setuptools as part of their setup process::
-
-    from ez_setup import use_setuptools
-    use_setuptools()
-
-    from setuptools import setup
-    # etc...
-
-0.4a2
- * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools
-   installation easier, and to allow distributions using setuptools to avoid
-   having to include setuptools in their source distribution.
-
- * All downloads are now managed by the ``PackageIndex`` class (which is now
-   subclassable and replaceable), so that embedders can more easily override
-   download logic, give download progress reports, etc.  The class has also
-   been moved to the new ``setuptools.package_index`` module.
-
- * The ``Installer`` class no longer handles downloading, manages a temporary
-   directory, or tracks the ``zip_ok`` option.  Downloading is now handled
-   by ``PackageIndex``, and ``Installer`` has become an ``easy_install``
-   command class based on ``setuptools.Command``.
-
- * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup
-   script in a directory sandbox, and a new ``setuptools.archive_util`` module
-   with an ``unpack_archive()`` API.  These were split out of EasyInstall to
-   allow reuse by other tools and applications.
-
- * ``setuptools.Command`` now supports reinitializing commands using keyword
-   arguments to set/reset options.  Also, ``Command`` subclasses can now set
-   their ``command_consumes_arguments`` attribute to ``True`` in order to
-   receive an ``args`` option containing the rest of the command line.
-
-0.3a2
- * Added new options to ``bdist_egg`` to allow tagging the egg's version number
-   with a subversion revision number, the current date, or an explicit tag
-   value.  Run ``setup.py bdist_egg --help`` to get more information.
-
- * Misc. bug fixes
-
-0.3a1
- * Initial release.
-
-Mailing List and Bug Tracker
-============================
-
-Please use the `distutils-sig mailing list`_ for questions and discussion about
-setuptools, and the `setuptools bug tracker`_ ONLY for issues you have
-confirmed via the list are actual bugs, and which you have reduced to a minimal
-set of steps to reproduce.
-
-.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
-.. _setuptools bug tracker: http://bugs.python.org/setuptools/
-
diff --git a/vendor/distribute-0.6.34/docs/build/html/_sources/using.txt b/vendor/distribute-0.6.34/docs/build/html/_sources/using.txt
deleted file mode 100644
index 192f1dc234a30dc1351c7f3cb7accd10e439ae72..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_sources/using.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-================================
-Using Distribute in your project
-================================
-
-To use Distribute in your project, the recommended way is to ship
-`distribute_setup.py` alongside your `setup.py` script and call
-it at the very beginning of `setup.py`, like this::
-
-    from distribute_setup import use_setuptools
-    use_setuptools()
-
-Another way is to add ``Distribute`` in the ``install_requires`` option::
-
-    from setuptools import setup
-
-    setup(
-        # ...
-        install_requires=['distribute'],
-    )
-
-
-XXX to be finished
diff --git a/vendor/distribute-0.6.34/docs/build/html/_static/basic.css b/vendor/distribute-0.6.34/docs/build/html/_static/basic.css
deleted file mode 100644
index 43e8bafaf35879a519818ef2157ad9687fb21413..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_static/basic.css
+++ /dev/null
@@ -1,540 +0,0 @@
-/*
- * basic.css
- * ~~~~~~~~~
- *
- * Sphinx stylesheet -- basic theme.
- *
- * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
- * :license: BSD, see LICENSE for details.
- *
- */
-
-/* -- main layout ----------------------------------------------------------- */
-
-div.clearer {
-    clear: both;
-}
-
-/* -- relbar ---------------------------------------------------------------- */
-
-div.related {
-    width: 100%;
-    font-size: 90%;
-}
-
-div.related h3 {
-    display: none;
-}
-
-div.related ul {
-    margin: 0;
-    padding: 0 0 0 10px;
-    list-style: none;
-}
-
-div.related li {
-    display: inline;
-}
-
-div.related li.right {
-    float: right;
-    margin-right: 5px;
-}
-
-/* -- sidebar --------------------------------------------------------------- */
-
-div.sphinxsidebarwrapper {
-    padding: 10px 5px 0 10px;
-}
-
-div.sphinxsidebar {
-    float: left;
-    width: 230px;
-    margin-left: -100%;
-    font-size: 90%;
-}
-
-div.sphinxsidebar ul {
-    list-style: none;
-}
-
-div.sphinxsidebar ul ul,
-div.sphinxsidebar ul.want-points {
-    margin-left: 20px;
-    list-style: square;
-}
-
-div.sphinxsidebar ul ul {
-    margin-top: 0;
-    margin-bottom: 0;
-}
-
-div.sphinxsidebar form {
-    margin-top: 10px;
-}
-
-div.sphinxsidebar input {
-    border: 1px solid #98dbcc;
-    font-family: sans-serif;
-    font-size: 1em;
-}
-
-div.sphinxsidebar #searchbox input[type="text"] {
-    width: 170px;
-}
-
-div.sphinxsidebar #searchbox input[type="submit"] {
-    width: 30px;
-}
-
-img {
-    border: 0;
-}
-
-/* -- search page ----------------------------------------------------------- */
-
-ul.search {
-    margin: 10px 0 0 20px;
-    padding: 0;
-}
-
-ul.search li {
-    padding: 5px 0 5px 20px;
-    background-image: url(file.png);
-    background-repeat: no-repeat;
-    background-position: 0 7px;
-}
-
-ul.search li a {
-    font-weight: bold;
-}
-
-ul.search li div.context {
-    color: #888;
-    margin: 2px 0 0 30px;
-    text-align: left;
-}
-
-ul.keywordmatches li.goodmatch a {
-    font-weight: bold;
-}
-
-/* -- index page ------------------------------------------------------------ */
-
-table.contentstable {
-    width: 90%;
-}
-
-table.contentstable p.biglink {
-    line-height: 150%;
-}
-
-a.biglink {
-    font-size: 1.3em;
-}
-
-span.linkdescr {
-    font-style: italic;
-    padding-top: 5px;
-    font-size: 90%;
-}
-
-/* -- general index --------------------------------------------------------- */
-
-table.indextable {
-    width: 100%;
-}
-
-table.indextable td {
-    text-align: left;
-    vertical-align: top;
-}
-
-table.indextable dl, table.indextable dd {
-    margin-top: 0;
-    margin-bottom: 0;
-}
-
-table.indextable tr.pcap {
-    height: 10px;
-}
-
-table.indextable tr.cap {
-    margin-top: 10px;
-    background-color: #f2f2f2;
-}
-
-img.toggler {
-    margin-right: 3px;
-    margin-top: 3px;
-    cursor: pointer;
-}
-
-div.modindex-jumpbox {
-    border-top: 1px solid #ddd;
-    border-bottom: 1px solid #ddd;
-    margin: 1em 0 1em 0;
-    padding: 0.4em;
-}
-
-div.genindex-jumpbox {
-    border-top: 1px solid #ddd;
-    border-bottom: 1px solid #ddd;
-    margin: 1em 0 1em 0;
-    padding: 0.4em;
-}
-
-/* -- general body styles --------------------------------------------------- */
-
-a.headerlink {
-    visibility: hidden;
-}
-
-h1:hover > a.headerlink,
-h2:hover > a.headerlink,
-h3:hover > a.headerlink,
-h4:hover > a.headerlink,
-h5:hover > a.headerlink,
-h6:hover > a.headerlink,
-dt:hover > a.headerlink {
-    visibility: visible;
-}
-
-div.body p.caption {
-    text-align: inherit;
-}
-
-div.body td {
-    text-align: left;
-}
-
-.field-list ul {
-    padding-left: 1em;
-}
-
-.first {
-    margin-top: 0 !important;
-}
-
-p.rubric {
-    margin-top: 30px;
-    font-weight: bold;
-}
-
-img.align-left, .figure.align-left, object.align-left {
-    clear: left;
-    float: left;
-    margin-right: 1em;
-}
-
-img.align-right, .figure.align-right, object.align-right {
-    clear: right;
-    float: right;
-    margin-left: 1em;
-}
-
-img.align-center, .figure.align-center, object.align-center {
-  display: block;
-  margin-left: auto;
-  margin-right: auto;
-}
-
-.align-left {
-    text-align: left;
-}
-
-.align-center {
-    text-align: center;
-}
-
-.align-right {
-    text-align: right;
-}
-
-/* -- sidebars -------------------------------------------------------------- */
-
-div.sidebar {
-    margin: 0 0 0.5em 1em;
-    border: 1px solid #ddb;
-    padding: 7px 7px 0 7px;
-    background-color: #ffe;
-    width: 40%;
-    float: right;
-}
-
-p.sidebar-title {
-    font-weight: bold;
-}
-
-/* -- topics ---------------------------------------------------------------- */
-
-div.topic {
-    border: 1px solid #ccc;
-    padding: 7px 7px 0 7px;
-    margin: 10px 0 10px 0;
-}
-
-p.topic-title {
-    font-size: 1.1em;
-    font-weight: bold;
-    margin-top: 10px;
-}
-
-/* -- admonitions ----------------------------------------------------------- */
-
-div.admonition {
-    margin-top: 10px;
-    margin-bottom: 10px;
-    padding: 7px;
-}
-
-div.admonition dt {
-    font-weight: bold;
-}
-
-div.admonition dl {
-    margin-bottom: 0;
-}
-
-p.admonition-title {
-    margin: 0px 10px 5px 0px;
-    font-weight: bold;
-}
-
-div.body p.centered {
-    text-align: center;
-    margin-top: 25px;
-}
-
-/* -- tables ---------------------------------------------------------------- */
-
-table.docutils {
-    border: 0;
-    border-collapse: collapse;
-}
-
-table.docutils td, table.docutils th {
-    padding: 1px 8px 1px 5px;
-    border-top: 0;
-    border-left: 0;
-    border-right: 0;
-    border-bottom: 1px solid #aaa;
-}
-
-table.field-list td, table.field-list th {
-    border: 0 !important;
-}
-
-table.footnote td, table.footnote th {
-    border: 0 !important;
-}
-
-th {
-    text-align: left;
-    padding-right: 5px;
-}
-
-table.citation {
-    border-left: solid 1px gray;
-    margin-left: 1px;
-}
-
-table.citation td {
-    border-bottom: none;
-}
-
-/* -- other body styles ----------------------------------------------------- */
-
-ol.arabic {
-    list-style: decimal;
-}
-
-ol.loweralpha {
-    list-style: lower-alpha;
-}
-
-ol.upperalpha {
-    list-style: upper-alpha;
-}
-
-ol.lowerroman {
-    list-style: lower-roman;
-}
-
-ol.upperroman {
-    list-style: upper-roman;
-}
-
-dl {
-    margin-bottom: 15px;
-}
-
-dd p {
-    margin-top: 0px;
-}
-
-dd ul, dd table {
-    margin-bottom: 10px;
-}
-
-dd {
-    margin-top: 3px;
-    margin-bottom: 10px;
-    margin-left: 30px;
-}
-
-dt:target, .highlighted {
-    background-color: #fbe54e;
-}
-
-dl.glossary dt {
-    font-weight: bold;
-    font-size: 1.1em;
-}
-
-.field-list ul {
-    margin: 0;
-    padding-left: 1em;
-}
-
-.field-list p {
-    margin: 0;
-}
-
-.refcount {
-    color: #060;
-}
-
-.optional {
-    font-size: 1.3em;
-}
-
-.versionmodified {
-    font-style: italic;
-}
-
-.system-message {
-    background-color: #fda;
-    padding: 5px;
-    border: 3px solid red;
-}
-
-.footnote:target  {
-    background-color: #ffa;
-}
-
-.line-block {
-    display: block;
-    margin-top: 1em;
-    margin-bottom: 1em;
-}
-
-.line-block .line-block {
-    margin-top: 0;
-    margin-bottom: 0;
-    margin-left: 1.5em;
-}
-
-.guilabel, .menuselection {
-    font-family: sans-serif;
-}
-
-.accelerator {
-    text-decoration: underline;
-}
-
-.classifier {
-    font-style: oblique;
-}
-
-abbr, acronym {
-    border-bottom: dotted 1px;
-    cursor: help;
-}
-
-/* -- code displays --------------------------------------------------------- */
-
-pre {
-    overflow: auto;
-    overflow-y: hidden;  /* fixes display issues on Chrome browsers */
-}
-
-td.linenos pre {
-    padding: 5px 0px;
-    border: 0;
-    background-color: transparent;
-    color: #aaa;
-}
-
-table.highlighttable {
-    margin-left: 0.5em;
-}
-
-table.highlighttable td {
-    padding: 0 0.5em 0 0.5em;
-}
-
-tt.descname {
-    background-color: transparent;
-    font-weight: bold;
-    font-size: 1.2em;
-}
-
-tt.descclassname {
-    background-color: transparent;
-}
-
-tt.xref, a tt {
-    background-color: transparent;
-    font-weight: bold;
-}
-
-h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
-    background-color: transparent;
-}
-
-.viewcode-link {
-    float: right;
-}
-
-.viewcode-back {
-    float: right;
-    font-family: sans-serif;
-}
-
-div.viewcode-block:target {
-    margin: -1px -10px;
-    padding: 0 10px;
-}
-
-/* -- math display ---------------------------------------------------------- */
-
-img.math {
-    vertical-align: middle;
-}
-
-div.body div.math p {
-    text-align: center;
-}
-
-span.eqno {
-    float: right;
-}
-
-/* -- printout stylesheet --------------------------------------------------- */
-
-@media print {
-    div.document,
-    div.documentwrapper,
-    div.bodywrapper {
-        margin: 0 !important;
-        width: 100%;
-    }
-
-    div.sphinxsidebar,
-    div.related,
-    div.footer,
-    #top-link {
-        display: none;
-    }
-}
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/docs/build/html/_static/nature.css b/vendor/distribute-0.6.34/docs/build/html/_static/nature.css
deleted file mode 100644
index 891c59ebfbf19346da36612253af351c90b83e3e..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_static/nature.css
+++ /dev/null
@@ -1,236 +0,0 @@
-/**
- * Sphinx stylesheet -- default theme
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- */
- 
-@import url("basic.css");
- 
-/* -- page layout ----------------------------------------------------------- */
- 
-body {
-    font-family: Arial, sans-serif;
-    font-size: 100%;
-    background-color: #111111;
-    color: #555555;
-    margin: 0;
-    padding: 0;
-}
-
-div.documentwrapper {
-    float: left;
-    width: 100%;
-}
-
-div.bodywrapper {
-    margin: 0 0 0 300px;
-}
-
-hr{
-    border: 1px solid #B1B4B6;
-}
- 
-div.document {
-    background-color: #fafafa;
-}
- 
-div.body {
-    background-color: #ffffff;
-    color: #3E4349;
-    padding: 1em 30px 30px 30px;
-    font-size: 0.9em;
-}
- 
-div.footer {
-    color: #555;
-    width: 100%;
-    padding: 13px 0;
-    text-align: center;
-    font-size: 75%;
-}
- 
-div.footer a {
-    color: #444444;
-}
- 
-div.related {
-    background-color: #6BA81E;
-    line-height: 36px;
-    color: #ffffff;
-    text-shadow: 0px 1px 0 #444444;
-    font-size: 1.1em;
-}
- 
-div.related a {
-    color: #E2F3CC;
-}
-
-div.related .right {
-    font-size: 0.9em;
-}
-
-div.sphinxsidebar {
-    font-size: 0.9em;
-    line-height: 1.5em;
-    width: 300px;
-}
-
-div.sphinxsidebarwrapper{
-    padding: 20px 0;
-}
- 
-div.sphinxsidebar h3,
-div.sphinxsidebar h4 {
-    font-family: Arial, sans-serif;
-    color: #222222;
-    font-size: 1.2em;
-    font-weight: bold;
-    margin: 0;
-    padding: 5px 10px;
-    text-shadow: 1px 1px 0 white
-}
-
-div.sphinxsidebar h3 a {
-    color: #444444;
-}
-
-div.sphinxsidebar p {
-    color: #888888;
-    padding: 5px 20px;
-    margin: 0.5em 0px;
-}
- 
-div.sphinxsidebar p.topless {
-}
- 
-div.sphinxsidebar ul {
-    margin: 10px 10px 10px 20px;
-    padding: 0;
-    color: #000000;
-}
- 
-div.sphinxsidebar a {
-    color: #444444;
-}
-
-div.sphinxsidebar a:hover {
-    color: #E32E00;
-}
-
-div.sphinxsidebar input {
-    border: 1px solid #cccccc;
-    font-family: sans-serif;
-    font-size: 1.1em;
-    padding: 0.15em 0.3em;
-}
-
-div.sphinxsidebar input[type=text]{
-    margin-left: 20px;
-}
- 
-/* -- body styles ----------------------------------------------------------- */
- 
-a {
-    color: #005B81;
-    text-decoration: none;
-}
- 
-a:hover {
-    color: #E32E00;
-}
- 
-div.body h1,
-div.body h2,
-div.body h3,
-div.body h4,
-div.body h5,
-div.body h6 {
-    font-family: Arial, sans-serif;
-    font-weight: normal;
-    color: #212224;
-    margin: 30px 0px 10px 0px;
-    padding: 5px 0 5px 0px;
-    text-shadow: 0px 1px 0 white;
-    border-bottom: 1px solid #C8D5E3;
-}
- 
-div.body h1 { margin-top: 0; font-size: 200%; }
-div.body h2 { font-size: 150%; }
-div.body h3 { font-size: 120%; }
-div.body h4 { font-size: 110%; }
-div.body h5 { font-size: 100%; }
-div.body h6 { font-size: 100%; }
- 
-a.headerlink {
-    color: #c60f0f;
-    font-size: 0.8em;
-    padding: 0 4px 0 4px;
-    text-decoration: none;
-}
- 
-a.headerlink:hover {
-    background-color: #c60f0f;
-    color: white;
-}
- 
-div.body p, div.body dd, div.body li {
-    line-height: 1.8em;
-}
- 
-div.admonition p.admonition-title + p {
-    display: inline;
-}
-
-div.highlight{
-    background-color: white;
-}
-
-div.note {
-    background-color: #eeeeee;
-    border: 1px solid #cccccc;
-}
- 
-div.seealso {
-    background-color: #ffffcc;
-    border: 1px solid #ffff66;
-}
- 
-div.topic {
-    background-color: #fafafa;
-    border-width: 0;
-}
- 
-div.warning {
-    background-color: #ffe4e4;
-    border: 1px solid #ff6666;
-}
- 
-p.admonition-title {
-    display: inline;
-}
- 
-p.admonition-title:after {
-    content: ":";
-}
- 
-pre {
-    padding: 10px;
-    background-color: #fafafa;
-    color: #222222;
-    line-height: 1.5em;
-    font-size: 1.1em;
-    margin: 1.5em 0 1.5em 0;
-    -webkit-box-shadow: 0px 0px 4px #d8d8d8;
-    -moz-box-shadow: 0px 0px 4px #d8d8d8;
-    box-shadow: 0px 0px 4px #d8d8d8;
-}
- 
-tt {
-    color: #222222;
-    padding: 1px 2px;
-    font-size: 1.2em;
-    font-family: monospace;
-}
-
-#table-of-contents ul {
-    padding-left: 2em;
-}
diff --git a/vendor/distribute-0.6.34/docs/build/html/_static/pygments.css b/vendor/distribute-0.6.34/docs/build/html/_static/pygments.css
deleted file mode 100644
index 652b76128b6a174f3407a50fff8735896f47d863..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/build/html/_static/pygments.css
+++ /dev/null
@@ -1,54 +0,0 @@
-.c { color: #999988; font-style: italic } /* Comment */
-.k { font-weight: bold } /* Keyword */
-.o { font-weight: bold } /* Operator */
-.cm { color: #999988; font-style: italic } /* Comment.Multiline */
-.cp { color: #999999; font-weight: bold } /* Comment.preproc */
-.c1 { color: #999988; font-style: italic } /* Comment.Single */
-.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
-.ge { font-style: italic } /* Generic.Emph */
-.gr { color: #aa0000 } /* Generic.Error */
-.gh { color: #999999 } /* Generic.Heading */
-.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
-.go { color: #111 } /* Generic.Output */
-.gp { color: #555555 } /* Generic.Prompt */
-.gs { font-weight: bold } /* Generic.Strong */
-.gu { color: #aaaaaa } /* Generic.Subheading */
-.gt { color: #aa0000 } /* Generic.Traceback */
-.kc { font-weight: bold } /* Keyword.Constant */
-.kd { font-weight: bold } /* Keyword.Declaration */
-.kp { font-weight: bold } /* Keyword.Pseudo */
-.kr { font-weight: bold } /* Keyword.Reserved */
-.kt { color: #445588; font-weight: bold } /* Keyword.Type */
-.m { color: #009999 } /* Literal.Number */
-.s { color: #bb8844 } /* Literal.String */
-.na { color: #008080 } /* Name.Attribute */
-.nb { color: #999999 } /* Name.Builtin */
-.nc { color: #445588; font-weight: bold } /* Name.Class */
-.no { color: #ff99ff } /* Name.Constant */
-.ni { color: #800080 } /* Name.Entity */
-.ne { color: #990000; font-weight: bold } /* Name.Exception */
-.nf { color: #990000; font-weight: bold } /* Name.Function */
-.nn { color: #555555 } /* Name.Namespace */
-.nt { color: #000080 } /* Name.Tag */
-.nv { color: purple } /* Name.Variable */
-.ow { font-weight: bold } /* Operator.Word */
-.mf { color: #009999 } /* Literal.Number.Float */
-.mh { color: #009999 } /* Literal.Number.Hex */
-.mi { color: #009999 } /* Literal.Number.Integer */
-.mo { color: #009999 } /* Literal.Number.Oct */
-.sb { color: #bb8844 } /* Literal.String.Backtick */
-.sc { color: #bb8844 } /* Literal.String.Char */
-.sd { color: #bb8844 } /* Literal.String.Doc */
-.s2 { color: #bb8844 } /* Literal.String.Double */
-.se { color: #bb8844 } /* Literal.String.Escape */
-.sh { color: #bb8844 } /* Literal.String.Heredoc */
-.si { color: #bb8844 } /* Literal.String.Interpol */
-.sx { color: #bb8844 } /* Literal.String.Other */
-.sr { color: #808000 } /* Literal.String.Regex */
-.s1 { color: #bb8844 } /* Literal.String.Single */
-.ss { color: #bb8844 } /* Literal.String.Symbol */
-.bp { color: #999999 } /* Name.Builtin.Pseudo */
-.vc { color: #ff99ff } /* Name.Variable.Class */
-.vg { color: #ff99ff } /* Name.Variable.Global */
-.vi { color: #ff99ff } /* Name.Variable.Instance */
-.il { color: #009999 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/docs/conf.py b/vendor/distribute-0.6.34/docs/conf.py
deleted file mode 100644
index 15226651f2f375bbefc373c84e3e0649b0e9eb7f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/conf.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Distribute documentation build configuration file, created by
-# sphinx-quickstart on Fri Jul 17 14:22:37 2009.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys, os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
-
-# -- General configuration -----------------------------------------------------
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = []
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.txt'
-
-# The encoding of source files.
-#source_encoding = 'utf-8'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'Distribute'
-copyright = u'2009-2011, The fellowship of the packaging'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = '0.6.34'
-# The full version, including alpha/beta/rc tags.
-release = '0.6.34'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of documents that shouldn't be included in the build.
-#unused_docs = []
-
-# List of directories, relative to source directory, that shouldn't be searched
-# for source files.
-exclude_trees = []
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  Major themes that come with
-# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = 'nature'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['_theme']
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-html_title = "Distribute documentation"
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-html_short_title = "Distribute"
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-html_sidebars = {'index': 'indexsidebar.html'}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-html_use_modindex = False
-
-# If false, no index is generated.
-html_use_index = False
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = ''
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'Distributedoc'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-# The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
-
-# The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-  ('index', 'Distribute.tex', ur'Distribute Documentation',
-   ur'The fellowship of the packaging', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_use_modindex = True
diff --git a/vendor/distribute-0.6.34/docs/easy_install.txt b/vendor/distribute-0.6.34/docs/easy_install.txt
deleted file mode 100644
index 9b4fcfbb6e673e9c97de8503d0deeab602f1594c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/easy_install.txt
+++ /dev/null
@@ -1,1597 +0,0 @@
-============
-Easy Install
-============
-
-Easy Install is a python module (``easy_install``) bundled with ``setuptools``
-that lets you automatically download, build, install, and manage Python
-packages.
-
-Please share your experiences with us! If you encounter difficulty installing
-a package, please contact us via the `distutils mailing list
-<http://mail.python.org/pipermail/distutils-sig/>`_.  (Note: please DO NOT send
-private email directly to the author of setuptools; it will be discarded.  The
-mailing list is a searchable archive of previously-asked and answered
-questions; you should begin your research there before reporting something as a
-bug -- and then do so via list discussion first.)
-
-(Also, if you'd like to learn about how you can use ``setuptools`` to make your
-own packages work better with EasyInstall, or provide EasyInstall-like features
-without requiring your users to use EasyInstall directly, you'll probably want
-to check out the full `setuptools`_ documentation as well.)
-
-.. contents:: **Table of Contents**
-
-
-Using "Easy Install"
-====================
-
-
-.. _installation instructions:
-
-Installing "Easy Install"
--------------------------
-
-Please see the `setuptools PyPI page <http://pypi.python.org/pypi/setuptools>`_
-for download links and basic installation instructions for each of the
-supported platforms.
-
-You will need at least Python 2.3.5, or if you are on a 64-bit platform, Python
-2.4.  An ``easy_install`` script will be installed in the normal location for
-Python scripts on your platform.
-
-Note that the instructions on the setuptools PyPI page assume that you are
-installing to Python's primary ``site-packages`` directory.  If this is
-not the case, you should consult the section below on `Custom Installation
-Locations`_ before installing.  (And, on Windows, you should not use the
-``.exe`` installer when installing to an alternate location.)
-
-Note that ``easy_install`` normally works by downloading files from the
-internet.  If you are behind an NTLM-based firewall that prevents Python
-programs from accessing the net directly, you may wish to first install and use
-the `APS proxy server <http://ntlmaps.sf.net/>`_, which lets you get past such
-firewalls in the same way that your web browser(s) do.
-
-(Alternately, if you do not wish easy_install to actually download anything, you
-can restrict it from doing so with the ``--allow-hosts`` option; see the
-sections on `restricting downloads with --allow-hosts`_ and `command-line
-options`_ for more details.)
-
-
-Troubleshooting
-~~~~~~~~~~~~~~~
-
-If EasyInstall/setuptools appears to install correctly, and you can run the
-``easy_install`` command but it fails with an ``ImportError``, the most likely
-cause is that you installed to a location other than ``site-packages``,
-without taking any of the steps described in the `Custom Installation
-Locations`_ section below.  Please see that section and follow the steps to
-make sure that your custom location will work correctly.  Then re-install.
-
-Similarly, if you can run ``easy_install``, and it appears to be installing
-packages, but then you can't import them, the most likely issue is that you
-installed EasyInstall correctly but are using it to install packages to a
-non-standard location that hasn't been properly prepared.  Again, see the
-section on `Custom Installation Locations`_ for more details.
-
-
-Windows Notes
-~~~~~~~~~~~~~
-
-On Windows, an ``easy_install.exe`` launcher will also be installed, so that
-you can just type ``easy_install`` as long as it's on your ``PATH``.  If typing
-``easy_install`` at the command prompt doesn't work, check to make sure your
-``PATH`` includes the appropriate ``C:\\Python2X\\Scripts`` directory.  On
-most current versions of Windows, you can change the ``PATH`` by right-clicking
-"My Computer", choosing "Properties" and selecting the "Advanced" tab, then
-clicking the "Environment Variables" button.  ``PATH`` will be in the "System
-Variables" section, and you will need to exit and restart your command shell
-(command.com, cmd.exe, bash, or other) for the change to take effect.  Be sure
-to add a ``;`` after the last item on ``PATH`` before adding the scripts
-directory to it.
-
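-For example, to add the scripts directory to ``PATH`` for the current command
-prompt session only (``C:\Python26\Scripts`` is purely illustrative; use the
-``Scripts`` directory of your own Python installation), you could type::
-
-    set PATH=%PATH%;C:\Python26\Scripts
-
-A permanent change still requires the "Environment Variables" dialog described
-above.
-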
-Note that instead of changing your ``PATH`` to include the Python scripts
-directory, you can also retarget the installation location for scripts so they
-go in a directory that's already on the ``PATH``.  For more information see the
-sections below on `Command-Line Options`_ and `Configuration Files`_.  You
-can pass command line options (such as ``--script-dir``) to
-``distribute_setup.py`` to control where ``easy_install.exe`` will be installed.
-
-
-
-Downloading and Installing a Package
-------------------------------------
-
-For basic use of ``easy_install``, you need only supply the filename or URL of
-a source distribution or .egg file (`Python Egg`__).
-
-__ http://peak.telecommunity.com/DevCenter/PythonEggs
-
-**Example 1**. Install a package by name, searching PyPI for the latest
-version, and automatically downloading, building, and installing it::
-
-    easy_install SQLObject
-
-**Example 2**. Install or upgrade a package by name and version by finding
-links on a given "download page"::
-
-    easy_install -f http://pythonpaste.org/package_index.html SQLObject
-
-**Example 3**. Download a source distribution from a specified URL,
-automatically building and installing it::
-
-    easy_install http://example.com/path/to/MyPackage-1.2.3.tgz
-
-**Example 4**. Install an already-downloaded .egg file::
-
-    easy_install /my_downloads/OtherPackage-3.2.1-py2.3.egg
-
-**Example 5**.  Upgrade an already-installed package to the latest version
-listed on PyPI::
-
-    easy_install --upgrade PyProtocols
-
-**Example 6**.  Install a source distribution that's already downloaded and
-extracted in the current directory (New in 0.5a9)::
-
-    easy_install .
-
-**Example 7**.  (New in 0.6a1) Find a source distribution or Subversion
-checkout URL for a package, and extract it or check it out to
-``~/projects/sqlobject`` (the name will always be in all-lowercase), where it
-can be examined or edited.  (The package will not be installed, but it can
-easily be installed with ``easy_install ~/projects/sqlobject``.  See `Editing
-and Viewing Source Packages`_ below for more info.)::
-
-    easy_install --editable --build-directory ~/projects SQLObject
-
-**Example 8**. (New in 0.6.11) Install a distribution within your home dir::
-
-    easy_install --user SQLAlchemy
-
-Easy Install accepts URLs, filenames, PyPI package names (i.e., ``distutils``
-"distribution" names), and package+version specifiers.  In each case, it will
-attempt to locate the latest available version that meets your criteria.
-
-When downloading or processing downloaded files, Easy Install recognizes
-distutils source distribution files with extensions of .tgz, .tar, .tar.gz,
-.tar.bz2, or .zip.  And of course it handles already-built .egg
-distributions as well as ``.win32.exe`` installers built using distutils.
-
-By default, packages are installed to the running Python installation's
-``site-packages`` directory, unless you provide the ``-d`` or ``--install-dir``
-option to specify an alternative directory, or specify an alternate location
-using distutils configuration files.  (See `Configuration Files`_, below.)
-
-By default, any scripts included with the package are installed to the running
-Python installation's standard script installation location.  However, if you
-specify an installation directory via the command line or a config file, then
-the default directory for installing scripts will be the same as the package
-installation directory, to ensure that the script will have access to the
-installed package.  You can override this using the ``-s`` or ``--script-dir``
-option.
-
-Installed packages are added to an ``easy-install.pth`` file in the install
-directory, so that Python will always use the most-recently-installed version
-of the package.  If you would like to be able to select which version to use at
-runtime, you should use the ``-m`` or ``--multi-version`` option.
-
-
-Upgrading a Package
--------------------
-
-You don't need to do anything special to upgrade a package: just install the
-new version, either by requesting a specific version, e.g.::
-
-    easy_install "SomePackage==2.0"
-
-a version greater than the one you have now::
-
-    easy_install "SomePackage>2.0"
-
-using the upgrade flag, to find the latest available version on PyPI::
-
-    easy_install --upgrade SomePackage
-
-or by using a download page, direct download URL, or package filename::
-
-    easy_install -f http://example.com/downloads ExamplePackage
-
-    easy_install http://example.com/downloads/ExamplePackage-2.0-py2.4.egg
-
-    easy_install my_downloads/ExamplePackage-2.0.tgz
-
-If you're using ``-m`` or ``--multi-version`` , using the ``require()``
-function at runtime automatically selects the newest installed version of a
-package that meets your version criteria.  So, installing a newer version is
-the only step needed to upgrade such packages.
-
-If you're installing to a directory on PYTHONPATH, or a configured "site"
-directory (and not using ``-m``), installing a package automatically replaces
-any previous version in the ``easy-install.pth`` file, so that Python will
-import the most-recently installed version by default.  So, again, installing
-the newer version is the only upgrade step needed.
-
-If you haven't suppressed script installation (using ``--exclude-scripts`` or
-``-x``), then the upgraded version's scripts will be installed, and they will
-be automatically patched to ``require()`` the corresponding version of the
-package, so that you can use them even if they are installed in multi-version
-mode.
-
-``easy_install`` never actually deletes packages (unless you're installing a
-package with the same name and version number as an existing package), so if
-you want to get rid of older versions of a package, please see `Uninstalling
-Packages`_, below.
-
-
-Changing the Active Version
----------------------------
-
-If you've upgraded a package, but need to revert to a previously-installed
-version, you can do so like this::
-
-    easy_install PackageName==1.2.3
-
-Where ``1.2.3`` is replaced by the exact version number you wish to switch to.
-If a package matching the requested name and version is not already installed
-in a directory on ``sys.path``, it will be located via PyPI and installed.
-
-If you'd like to switch to the latest installed version of ``PackageName``, you
-can do so like this::
-
-    easy_install PackageName
-
-This will activate the latest installed version.  (Note: if you have set any
-``find_links`` via distutils configuration files, those download pages will be
-checked for the latest available version of the package, and it will be
-downloaded and installed if it is newer than your current version.)
-
-Note that changing the active version of a package will install the newly
-active version's scripts, unless the ``--exclude-scripts`` or ``-x`` option is
-specified.
-
-
-Uninstalling Packages
----------------------
-
-If you have replaced a package with another version, then you can just delete
-the package(s) you don't need by deleting the PackageName-versioninfo.egg file
-or directory (found in the installation directory).
-
-If you want to delete the currently installed version of a package (or all
-versions of a package), you should first run::
-
-    easy_install -m PackageName
-
-This will ensure that Python doesn't continue to search for a package you're
-planning to remove. After you've done this, you can safely delete the .egg
-files or directories, along with any scripts you wish to remove.
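-
-For example (the package name, version, and installation path below are only
-illustrative; your egg will live wherever it was installed)::
-
-    easy_install -m ExamplePackage
-    rm -rf /usr/lib/python2.5/site-packages/ExamplePackage-2.0-py2.5.egg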
-
-
-Managing Scripts
-----------------
-
-Whenever you install, upgrade, or change versions of a package, EasyInstall
-automatically installs the scripts for the selected package version, unless
-you tell it not to with ``-x`` or ``--exclude-scripts``.  If any scripts in
-the script directory have the same name, they are overwritten.
-
-Thus, you do not normally need to manually delete scripts for older versions of
-a package, unless the newer version of the package does not include a script
-of the same name.  However, if you are completely uninstalling a package, you
-may wish to manually delete its scripts.
-
-EasyInstall's default behavior means that you can normally only run scripts
-from one version of a package at a time.  If you want to keep multiple versions
-of a script available, however, you can simply use the ``--multi-version`` or
-``-m`` option, and rename the scripts that EasyInstall creates.  This works
-because EasyInstall installs scripts as short code stubs that ``require()`` the
-matching version of the package the script came from, so renaming the script
-has no effect on what it executes.
-
-For example, suppose you want to use two versions of the ``rst2html`` tool
-provided by the `docutils <http://docutils.sf.net/>`_ package.  You might
-first install one version::
-
-    easy_install -m docutils==0.3.9
-
-then rename the ``rst2html.py`` to ``r2h_039``, and install another version::
-
-    easy_install -m docutils==0.3.10
-
-This will create another ``rst2html.py`` script, this one using docutils
-version 0.3.10 instead of 0.3.9.  You now have two scripts, each using a
-different version of the package.  (Notice that we used ``-m`` for both
-installations, so that Python won't lock us out of using anything but the most
-recently-installed version of the package.)
-
-
-
-Tips & Techniques
------------------
-
-
-Multiple Python Versions
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-As of version 0.6a11, EasyInstall installs itself under two names:
-``easy_install`` and ``easy_install-N.N``, where ``N.N`` is the Python version
-used to install it.  Thus, if you install EasyInstall for both Python 2.3 and
-2.4, you can use the ``easy_install-2.3`` or ``easy_install-2.4`` scripts to
-install packages for Python 2.3 or 2.4, respectively.
-
-Also, if you're working with Python version 2.4 or higher, you can run Python
-with ``-m easy_install`` to run that particular Python version's
-``easy_install`` command.
-
-
-Restricting Downloads with ``--allow-hosts``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can use the ``--allow-hosts`` (``-H``) option to restrict what domains
-EasyInstall will look for links and downloads on.  ``--allow-hosts=None``
-prevents downloading altogether.  You can also use wildcards, for example
-to restrict downloading to hosts in your own intranet.  See the section below
-on `Command-Line Options`_ for more details on the ``--allow-hosts`` option.
-
-By default, there are no host restrictions in effect, but you can change this
-default by editing the appropriate `configuration files`_ and adding:
-
-.. code-block:: ini
-
-    [easy_install]
-    allow_hosts = *.myintranet.example.com,*.python.org
-
-The above example would then allow downloads only from hosts in the
-``python.org`` and ``myintranet.example.com`` domains, unless overridden on the
-command line.
-
-
-Installing on Un-networked Machines
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Just copy the eggs or source packages you need to a directory on the target
-machine, then use the ``-f`` or ``--find-links`` option to specify that
-directory's location.  For example::
-
-    easy_install -H None -f somedir SomePackage
-
-will attempt to install SomePackage using only eggs and source packages found
-in ``somedir`` and disallowing all remote access.  You should of course make
-sure you have all of SomePackage's dependencies available in somedir.
-
-If you have another machine of the same operating system and library versions
-(or if the packages aren't platform-specific), you can create the directory of
-eggs using a command like this::
-
-    easy_install -zmaxd somedir SomePackage
-
-This will tell EasyInstall to put zipped eggs or source packages for
-SomePackage and all its dependencies into ``somedir``, without creating any
-scripts or .pth files.  You can then copy the contents of ``somedir`` to the
-target machine.  (``-z`` means zipped eggs, ``-m`` means multi-version, which
-prevents .pth files from being used, ``-a`` means to copy all the eggs needed,
-even if they're installed elsewhere on the machine, and ``-d`` indicates the
-directory to place the eggs in.)
-
-You can also build the eggs from local development packages that were installed
-with the ``setup.py develop`` command, by including the ``-l`` option, e.g.::
-
-    easy_install -zmaxld somedir SomePackage
-
-This will use locally-available source distributions to build the eggs.
-
-
-Packaging Others' Projects As Eggs
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Need to distribute a package that isn't published in egg form?  You can use
-EasyInstall to build eggs for a project.  You'll want to use the ``--zip-ok``,
-``--exclude-scripts``, and possibly ``--no-deps`` options (``-z``, ``-x`` and
-``-N``, respectively).  Use ``-d`` or ``--install-dir`` to specify the location
-where you'd like the eggs placed.  By placing them in a directory that is
-published to the web, you can then make the eggs available for download, either
-in an intranet or to the internet at large.
-
-If someone distributes a package in the form of a single ``.py`` file, you can
-wrap it in an egg by tacking an ``#egg=name-version`` suffix on the file's URL.
-So, something like this::
-
-    easy_install -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
-
-will install the package as an egg, and this::
-
-    easy_install -zmaxd. \
-        -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
-
-will create a ``.egg`` file in the current directory.
-
-
-Creating your own Package Index
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In addition to local directories and the Python Package Index, EasyInstall can
-find download links on most any web page whose URL is given to the ``-f``
-(``--find-links``) option.  In the simplest case, you can simply have a web
-page with links to eggs or Python source packages, even an automatically
-generated directory listing (such as the Apache web server provides).
-
-If you are setting up an intranet site for package downloads, you may want to
-configure the target machines to use your download site by default, adding
-something like this to their `configuration files`_:
-
-.. code-block:: ini
-
-    [easy_install]
-    find_links = http://mypackages.example.com/somedir/
-                 http://turbogears.org/download/
-                 http://peak.telecommunity.com/dist/
-
-As you can see, you can list multiple URLs separated by whitespace, continuing
-on multiple lines if necessary (as long as the subsequent lines are indented).
-
-If you are more ambitious, you can also create an entirely custom package index
-or PyPI mirror.  See the ``--index-url`` option under `Command-Line Options`_,
-below, and also the section on `Package Index "API"`_.
-
-
-Password-Protected Sites
-------------------------
-
-If a site you want to download from is password-protected using HTTP "Basic"
-authentication, you can specify your credentials in the URL, like so::
-
-    http://some_userid:some_password@some.example.com/some_path/
-
-You can do this with both index page URLs and direct download URLs.  As long
-as any HTML pages read by easy_install use *relative* links to point to the
-downloads, the same user ID and password will be used to do the downloading.
-
-
-Controlling Build Options
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall respects standard distutils `Configuration Files`_, so you can use
-them to configure build options for packages that it installs from source.  For
-example, if you are on Windows using the MinGW compiler, you can configure the
-default compiler by putting something like this:
-
-.. code-block:: ini
-
-    [build]
-    compiler = mingw32
-
-into the appropriate distutils configuration file.  In fact, since this is just
-normal distutils configuration, it will affect any builds using that config
-file, not just ones done by EasyInstall.  For example, if you add those lines
-to ``distutils.cfg`` in the ``distutils`` package directory, it will be the
-default compiler for *all* packages you build.  See `Configuration Files`_
-below for a list of the standard configuration file locations, and links to
-more documentation on using distutils configuration files.
-
-
-Editing and Viewing Source Packages
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Sometimes a package's source distribution contains additional documentation,
-examples, configuration files, etc., that are not part of its actual code.  If
-you want to be able to examine these files, you can use the ``--editable``
-option to EasyInstall, and EasyInstall will look for a source distribution
-or Subversion URL for the package, then download and extract it or check it out
-as a subdirectory of the ``--build-directory`` you specify.  If you then wish
-to install the package after editing or configuring it, you can do so by
-rerunning EasyInstall with that directory as the target.
-
-Note that using ``--editable`` stops EasyInstall from actually building or
-installing the package; it just finds, obtains, and possibly unpacks it for
-you.  This allows you to make changes to the package if necessary, and to
-either install it in development mode using ``setup.py develop`` (if the
-package uses setuptools, that is), or install it by running ``easy_install
-projectdir`` (where ``projectdir`` is the subdirectory EasyInstall created for
-the downloaded package).
-
-In order to use ``--editable`` (``-e`` for short), you *must* also supply a
-``--build-directory`` (``-b`` for short).  The project will be placed in a
-subdirectory of the build directory.  The subdirectory will have the same
-name as the project itself, but in all-lowercase.  If a file or directory of
-that name already exists, EasyInstall will print an error message and exit.
-
-Also, when using ``--editable``, you cannot use URLs or filenames as arguments.
-You *must* specify project names (and optional version requirements) so that
-EasyInstall knows what directory name(s) to create.  If you need to force
-EasyInstall to use a particular URL or filename, you should specify it as a
-``--find-links`` item (``-f`` for short), and then also specify
-the project name, e.g.::
-
-    easy_install -eb ~/projects \
-     -fhttp://prdownloads.sourceforge.net/ctypes/ctypes-0.9.6.tar.gz?download \
-     ctypes==0.9.6
-
-
-Dealing with Installation Conflicts
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-(NOTE: As of 0.6a11, this section is obsolete; it is retained here only so that
-people using older versions of EasyInstall can consult it.  As of version
-0.6a11, installation conflicts are handled automatically without deleting the
-old or system-installed packages, and without ignoring the issue.  Instead,
-eggs are automatically shifted to the front of ``sys.path`` using special
-code added to the ``easy-install.pth`` file.  So, if you are using version
-0.6a11 or better of setuptools, you do not need to worry about conflicts,
-and the following issues do not apply to you.)
-
-EasyInstall installs distributions in a "managed" way, such that each
-distribution can be independently activated or deactivated on ``sys.path``.
-However, packages that were not installed by EasyInstall are "unmanaged",
-in that they usually live all in one directory and cannot be independently
-activated or deactivated.
-
-As a result, if you are using EasyInstall to upgrade an existing package, or
-to install a package with the same name as an existing package, EasyInstall
-will warn you of the conflict.  (This is an improvement over ``setup.py
-install``, because the ``distutils`` just install new packages on top of old
-ones, possibly combining two unrelated packages or leaving behind modules that
-have been deleted in the newer version of the package.)
-
-By default, EasyInstall will stop the installation if it detects a conflict
-between an existing, "unmanaged" package, and a module or package in any of
-the distributions you're installing.  It will display a list of all of the
-existing files and directories that would need to be deleted for the new
-package to be able to function correctly.  You can then either delete these
-conflicting files and directories yourself and re-run EasyInstall, or you can
-just use the ``--delete-conflicting`` or ``--ignore-conflicts-at-my-risk``
-options, as described under `Command-Line Options`_, below.
-
-Of course, once you've replaced all of your existing "unmanaged" packages with
-versions managed by EasyInstall, you won't have any more conflicts to worry
-about!
-
-
-Compressed Installation
-~~~~~~~~~~~~~~~~~~~~~~~
-
-EasyInstall tries to install packages in zipped form, if it can.  Zipping
-packages can improve Python's overall import performance if you're not using
-the ``--multi-version`` option, because Python processes zipfile entries on
-``sys.path`` much faster than it does directories.
-
-As of version 0.5a9, EasyInstall analyzes packages to determine whether they
-can be safely installed as a zipfile, and then acts on its analysis.  (Previous
-versions would not install a package as a zipfile unless you used the
-``--zip-ok`` option.)
-
-The current analysis approach is fairly conservative; it currently looks for:
-
- * Any use of the ``__file__`` or ``__path__`` variables (which should be
-   replaced with ``pkg_resources`` API calls)
-
- * Possible use of ``inspect`` functions that expect to manipulate source files
-   (e.g. ``inspect.getsource()``)
-
- * Top-level modules that might be scripts used with ``python -m`` (Python 2.4)
-
-If any of the above are found in the package being installed, EasyInstall will
-assume that the package cannot be safely run from a zipfile, and unzip it to
-a directory instead.  You can override this analysis with the ``--zip-ok`` flag,
-which will tell EasyInstall to install the package as a zipfile anyway.  Or,
-you can use the ``--always-unzip`` flag, in which case EasyInstall will always
-unzip, even if its analysis says the package is safe to run as a zipfile.
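-
-As noted in the first bullet above, direct use of ``__file__`` to locate data
-files can usually be replaced with a ``pkg_resources`` API call, which works
-whether or not the package is installed as a zipfile.  A minimal sketch (the
-resource name ``data.txt`` is only an example)::
-
-    from pkg_resources import resource_string
-
-    # instead of: open(os.path.join(os.path.dirname(__file__), 'data.txt')).read()
-    data = resource_string(__name__, 'data.txt')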
-
-Normally, however, it is simplest to let EasyInstall handle the determination
-of whether to zip or unzip, and only specify overrides when needed to work
-around a problem.  If you find you need to override EasyInstall's guesses, you
-may want to contact the package author and the EasyInstall maintainers, so that
-they can make appropriate changes in future versions.
-
-(Note: If a package uses ``setuptools`` in its setup script, the package author
-has the option to declare the package safe or unsafe for zipped usage via the
-``zip_safe`` argument to ``setup()``.  If the package author makes such a
-declaration, EasyInstall believes the package's author and does not perform its
-own analysis.  However, your command-line option, if any, will still override
-the package author's choice.)
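-
-For package authors, making that declaration in a ``setuptools``-based setup
-script is a one-line addition; for instance (a minimal, purely illustrative
-``setup.py``)::
-
-    from setuptools import setup
-
-    setup(
-        name='ExamplePackage',
-        version='1.0',
-        packages=['examplepackage'],
-        zip_safe=False,   # tell EasyInstall to always unzip this package
-    )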
-
-
-Reference Manual
-================
-
-Configuration Files
--------------------
-
-(New in 0.4a2)
-
-You may specify default options for EasyInstall using the standard
-distutils configuration files, under the command heading ``easy_install``.
-EasyInstall will look first for a ``setup.cfg`` file in the current directory,
-then a ``~/.pydistutils.cfg`` or ``$HOME\\pydistutils.cfg`` (on Unix-like OSes
-and Windows, respectively), and finally a ``distutils.cfg`` file in the
-``distutils`` package directory.  Here's a simple example:
-
-.. code-block:: ini
-
-    [easy_install]
-
-    # set the default location to install packages
-    install_dir = /home/me/lib/python
-
-    # Notice that indentation can be used to continue an option
-    # value; this is especially useful for the "--find-links"
-    # option, which tells easy_install to use download links on
-    # these pages before consulting PyPI:
-    #
-    find_links = http://sqlobject.org/
-                 http://peak.telecommunity.com/dist/
-
-In addition to accepting configuration for its own options under
-``[easy_install]``, EasyInstall also respects defaults specified for other
-distutils commands.  For example, if you don't set an ``install_dir`` for
-``[easy_install]``, but *have* set an ``install_lib`` for the ``[install]``
-command, this will become EasyInstall's default installation directory.  Thus,
-if you are already using distutils configuration files to set default install
-locations, build options, etc., EasyInstall will respect your existing settings
-until and unless you override them explicitly in an ``[easy_install]`` section.
-
-For more information, see also the current Python documentation on the `use and
-location of distutils configuration files <http://docs.python.org/inst/config-syntax.html>`_.
-
-Notice that ``easy_install`` will use the ``setup.cfg`` from the current
-working directory only if it was triggered from ``setup.py`` through the
-``install_requires`` option. The standalone command will not use that file.
-
-Command-Line Options
---------------------
-
-``--zip-ok, -z``
-    Install all packages as zip files, even if they are marked as unsafe for
-    running as a zipfile.  This can be useful when EasyInstall's analysis
-    of a non-setuptools package is too conservative, but keep in mind that
-    the package may not work correctly.  (Changed in 0.5a9; previously this
-    option was required in order for zipped installation to happen at all.)
-
-``--always-unzip, -Z``
-    Don't install any packages as zip files, even if the packages are marked
-    as safe for running as a zipfile.  This can be useful if a package does
-    something unsafe, but not in a way that EasyInstall can easily detect.
-    EasyInstall's default analysis is currently very conservative, however, so
-    you should only use this option if you've had problems with a particular
-    package, and *after* reporting the problem to the package's maintainer and
-    to the EasyInstall maintainers.
-
-    (Note: the ``-z/-Z`` options only affect the installation of newly-built
-    or downloaded packages that are not already installed in the target
-    directory; if you want to convert an existing installed version from
-    zipped to unzipped or vice versa, you'll need to delete the existing
-    version first, and re-run EasyInstall.)
-
-``--multi-version, -m``
-    "Multi-version" mode. Specifying this option prevents ``easy_install`` from
-    adding an ``easy-install.pth`` entry for the package being installed, and
-    if an entry for any version of the package already exists, it will be removed
-    upon successful installation. In multi-version mode, no specific version of
-    the package is available for importing, unless you use
-    ``pkg_resources.require()`` to put it on ``sys.path``. This can be as
-    simple as::
-
-        from pkg_resources import require
-        require("SomePackage", "OtherPackage", "MyPackage")
-
-    which will put the latest installed version of the specified packages on
-    ``sys.path`` for you. (For more advanced uses, like selecting specific
-    versions and enabling optional dependencies, see the ``pkg_resources`` API
-    doc.)
-
-    Changed in 0.6a10: this option is no longer silently enabled when
-    installing to a non-PYTHONPATH, non-"site" directory.  You must always
-    explicitly use this option if you want it to be active.
-
-``--upgrade, -U``   (New in 0.5a4)
-    By default, EasyInstall only searches online if a project/version
-    requirement can't be met by distributions already installed
-    on sys.path or the installation directory.  However, if you supply the
-    ``--upgrade`` or ``-U`` flag, EasyInstall will always check the package
-    index and ``--find-links`` URLs before selecting a version to install.  In
-    this way, you can force EasyInstall to use the latest available version of
-    any package it installs (subject to any version requirements that might
-    exclude such later versions).
-
-``--install-dir=DIR, -d DIR``
-    Set the installation directory. It is up to you to ensure that this
-    directory is on ``sys.path`` at runtime, and to use
-    ``pkg_resources.require()`` to enable the installed package(s) that you
-    need.
-
-    (New in 0.4a2) If this option is not directly specified on the command line
-    or in a distutils configuration file, the distutils default installation
-    location is used.  Normally, this would be the ``site-packages`` directory,
-    but if you are using distutils configuration files, setting things like
-    ``prefix`` or ``install_lib``, then those settings are taken into
-    account when computing the default installation directory, as is the
-    ``--prefix`` option.
-
-``--script-dir=DIR, -s DIR``
-    Set the script installation directory.  If you don't supply this option
-    (via the command line or a configuration file), but you *have* supplied
-    an ``--install-dir`` (via command line or config file), then this option
-    defaults to the same directory, so that the scripts will be able to find
-    their associated package installation.  Otherwise, this setting defaults
-    to the location where the distutils would normally install scripts, taking
-    any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
-    Don't install scripts.  This is useful if you need to install multiple
-    versions of a package, but do not want to reset the version that will be
-    run by scripts that are already installed.
-
-``--user`` (New in 0.6.11)
-    Use the user-site-packages as specified in :pep:`370`
-    instead of the global site-packages.
-
-``--always-copy, -a``   (New in 0.5a4)
-    Copy all needed distributions to the installation directory, even if they
-    are already present in a directory on sys.path.  In older versions of
-    EasyInstall, this was the default behavior, but now you must explicitly
-    request it.  By default, EasyInstall will no longer copy such distributions
-    from other sys.path directories to the installation directory, unless you
-    explicitly gave the distribution's filename on the command line.
-
-    Note that as of 0.6a10, using this option excludes "system" and
-    "development" eggs from consideration because they can't be reliably
-    copied.  This may cause EasyInstall to choose an older version of a package
-    than what you expected, or it may cause downloading and installation of a
-    fresh copy of something that's already installed.  You will see warning
-    messages for any eggs that EasyInstall skips, before it falls back to an
-    older version or attempts to download a fresh copy.
-
-``--find-links=URLS_OR_FILENAMES, -f URLS_OR_FILENAMES``
-    Scan the specified "download pages" or directories for direct links to eggs
-    or other distributions.  Any existing file or directory names or direct
-    download URLs are immediately added to EasyInstall's search cache, and any
-    indirect URLs (ones that don't point to eggs or other recognized archive
-    formats) are added to a list of additional places to search for download
-    links.  As soon as EasyInstall has to go online to find a package (either
-    because it doesn't exist locally, or because ``--upgrade`` or ``-U`` was
-    used), the specified URLs will be downloaded and scanned for additional
-    direct links.
-
-    Eggs and archives found by way of ``--find-links`` are only downloaded if
-    they are needed to meet a requirement specified on the command line; links
-    to unneeded packages are ignored.
-
-    If all requested packages can be found using links on the specified
-    download pages, the Python Package Index will not be consulted unless you
-    also specified the ``--upgrade`` or ``-U`` option.
-
-    (Note: if you want to refer to a local HTML file containing links, you must
-    use a ``file:`` URL, as filenames that do not refer to a directory, egg, or
-    archive are ignored.)
-
-    You may specify multiple URLs or file/directory names with this option,
-    separated by whitespace.  Note that on the command line, you will probably
-    have to surround the URL list with quotes, so that it is recognized as a
-    single option value.  You can also specify URLs in a configuration file;
-    see `Configuration Files`_, above.
-
-    Changed in 0.6a10: previously all URLs and directories passed to this
-    option were scanned as early as possible, but from 0.6a10 on, only
-    directories and direct archive links are scanned immediately; URLs are not
-    retrieved unless a package search was already going to go online due to a
-    package not being available locally, or due to the use of the ``--upgrade``
-    or ``-U`` option.
-
-``--no-find-links`` (New in Distribute 0.6.11)
-    Blocks the addition of any link.  This is useful if you want to avoid
-    adding links defined in a project easy_install is installing (whether it's
-    a requested project or a dependency).  When used, ``--find-links`` is
-    ignored.
-
-``--delete-conflicting, -D`` (Removed in 0.6a11)
-    (As of 0.6a11, this option is no longer necessary; please do not use it!)
-
-    If you are replacing a package that was previously installed *without*
-    using EasyInstall, the old version may end up on ``sys.path`` before the
-    version being installed with EasyInstall.  EasyInstall will normally abort
-    the installation of a package if it detects such a conflict, and ask you to
-    manually remove the conflicting files or directories.  If you specify this
-    option, however, EasyInstall will attempt to delete the files or
-    directories itself, and then proceed with the installation.
-
-``--ignore-conflicts-at-my-risk`` (Removed in 0.6a11)
-    (As of 0.6a11, this option is no longer necessary; please do not use it!)
-
-    Ignore conflicting packages and proceed with installation anyway, even
-    though it means the package probably won't work properly.  If the
-    conflicting package is in a directory you can't write to, this may be your
-    only option, but you will need to take more invasive measures to get the
-    installed package to work, like manually adding it to ``PYTHONPATH`` or to
-    ``sys.path`` at runtime.
-
-``--index-url=URL, -i URL`` (New in 0.4a1; default changed in 0.6c7)
-    Specifies the base URL of the Python Package Index.  The default is
-    http://pypi.python.org/simple if not specified.  When a package is requested
-    that is not locally available or linked from a ``--find-links`` download
-    page, the package index will be searched for download pages for the needed
-    package, and those download pages will be searched for links to download
-    an egg or source distribution.
-
-``--editable, -e`` (New in 0.6a1)
-    Only find and download source distributions for the specified projects,
-    unpacking them to subdirectories of the specified ``--build-directory``.
-    EasyInstall will not actually build or install the requested projects or
-    their dependencies; it will just find and extract them for you.  See
-    `Editing and Viewing Source Packages`_ above for more details.
-
-``--build-directory=DIR, -b DIR`` (UPDATED in 0.6a1)
-    Set the directory used to build source packages.  If a package is built
-    from a source distribution or checkout, it will be extracted to a
-    subdirectory of the specified directory.  The subdirectory will have the
-    same name as the extracted distribution's project, but in all-lowercase.
-    If a file or directory of that name already exists in the given directory,
-    a warning will be printed to the console, and the build will take place in
-    a temporary directory instead.
-
-    This option is most useful in combination with the ``--editable`` option,
-    which forces EasyInstall to *only* find and extract (but not build and
-    install) source distributions.  See `Editing and Viewing Source Packages`_,
-    above, for more information.
-
-``--verbose, -v, --quiet, -q`` (New in 0.4a4)
-    Control the level of detail of EasyInstall's progress messages.  The
-    default detail level is "info", which prints information only about
-    relatively time-consuming operations like running a setup script, unpacking
-    an archive, or retrieving a URL.  Using ``-q`` or ``--quiet`` drops the
-    detail level to "warn", which will only display installation reports,
-    warnings, and errors.  Using ``-v`` or ``--verbose`` increases the detail
-    level to include individual file-level operations, link analysis messages,
-    and distutils messages from any setup scripts that get run.  If you include
-    the ``-v`` option more than once, the second and subsequent uses are passed
-    down to any setup scripts, increasing the verbosity of their reporting as
-    well.
-
-``--dry-run, -n`` (New in 0.4a4)
-    Don't actually install the package or scripts.  This option is passed down
-    to any setup scripts run, so packages should not actually build either.
-    This does *not* skip downloading, nor does it skip extracting source
-    distributions to a temporary/build directory.
-
-``--optimize=LEVEL``, ``-O LEVEL`` (New in 0.4a4)
-    If you are installing from a source distribution, and are *not* using the
-    ``--zip-ok`` option, this option controls the optimization level for
-    compiling installed ``.py`` files to ``.pyo`` files.  It does not affect
-    the compilation of modules contained in ``.egg`` files, only those in
-    ``.egg`` directories.  The optimization level can be set to 0, 1, or 2;
-    the default is 0 (unless it's set under ``install`` or ``install_lib`` in
-    one of your distutils configuration files).
-
-``--record=FILENAME``  (New in 0.5a4)
-    Write a record of all installed files to FILENAME.  This is basically the
-    same as the same option for the standard distutils "install" command, and
-    is included for compatibility with tools that expect to pass this option
-    to "setup.py install".
-
-``--site-dirs=DIRLIST, -S DIRLIST``   (New in 0.6a1)
-    Specify one or more custom "site" directories (separated by commas).
-    "Site" directories are directories where ``.pth`` files are processed, such
-    as the main Python ``site-packages`` directory.  As of 0.6a10, EasyInstall
-    automatically detects whether a given directory processes ``.pth`` files
-    (or can be made to do so), so you should not normally need to use this
-    option.  It is now only necessary if you want to override EasyInstall's
-    judgment and force an installation directory to be treated as if it
-    supported ``.pth`` files.
-
-``--no-deps, -N``  (New in 0.6a6)
-    Don't install any dependencies.  This is intended as a convenience for
-    tools that wrap eggs in a platform-specific packaging system.  (We don't
-    recommend that you use it for anything else.)
-
-``--allow-hosts=PATTERNS, -H PATTERNS``   (New in 0.6a6)
-    Restrict downloading and spidering to hosts matching the specified glob
-    patterns.  E.g. ``-H *.python.org`` restricts web access so that only
-    packages listed on, and downloadable from, machines in the ``python.org``
-    domain can be used.  The glob patterns must match the *entire*
-    user/host/port section of
-    the target URL(s).  For example, ``*.python.org`` will NOT accept a URL
-    like ``http://python.org/foo`` or ``http://www.python.org:8080/``.
-    Multiple patterns can be specified by separating them with commas.  The
-    default pattern is ``*``, which matches anything.
-
-    In general, this option is mainly useful for blocking EasyInstall's web
-    access altogether (e.g. ``-Hlocalhost``), or to restrict it to an intranet
-    or other trusted site.  EasyInstall will do the best it can to satisfy
-    dependencies given your host restrictions, but of course can fail if it
-    can't find suitable packages.  EasyInstall displays all blocked URLs, so
-    that you can adjust your ``--allow-hosts`` setting if it is more strict
-    than you intended.  Some sites may wish to define a restrictive default
-    setting for this option in their `configuration files`_, and then manually
-    override the setting on the command line as needed.
-
-``--prefix=DIR`` (New in 0.6a10)
-    Use the specified directory as a base for computing the default
-    installation and script directories.  On Windows, the resulting default
-    directories will be ``prefix\\Lib\\site-packages`` and ``prefix\\Scripts``,
-    while on other platforms the defaults will be
-    ``prefix/lib/python2.X/site-packages`` (with the appropriate version
-    substituted) for libraries and ``prefix/bin`` for scripts.
-
-    Note that the ``--prefix`` option only sets the *default* installation and
-    script directories, and does not override the ones set on the command line
-    or in a configuration file.
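-
-    For example (``/opt/mystack`` is a purely illustrative prefix)::
-
-        easy_install --prefix=/opt/mystack SomePackage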
-
-``--local-snapshots-ok, -l`` (New in 0.6c6)
-    Normally, EasyInstall prefers to only install *released* versions of
-    projects, not in-development ones, because such projects may not
-    have a currently-valid version number.  So, it usually only installs them
-    when their ``setup.py`` directory is explicitly passed on the command line.
-
-    However, if this option is used, then any in-development projects that were
-    installed using the ``setup.py develop`` command, will be used to build
-    eggs, effectively upgrading the "in-development" project to a snapshot
-    release.  Normally, this option is used only in conjunction with the
-    ``--always-copy`` option to create a distributable snapshot of every egg
-    needed to run an application.
-
-    Note that if you use this option, you must make sure that there is a valid
-    version number (such as an SVN revision number tag) for any in-development
-    projects that may be used, as otherwise EasyInstall may not be able to tell
-    what version of the project is "newer" when future installations or
-    upgrades are attempted.
-
-
-.. _non-root installation:
-
-Custom Installation Locations
------------------------------
-
-By default, EasyInstall installs python packages into Python's main ``site-packages`` directory,
-and manages them using a custom ``.pth`` file in that same directory.
-
-Very often though, a user or developer wants ``easy_install`` to install and manage python packages
-in an alternative location, usually for one of 3 reasons:
-
-1. They don't have access to write to the main Python site-packages directory.
-
-2. They want a user-specific stash of packages that is not visible to other users.
-
-3. They want to isolate a set of packages to a specific python application, usually to minimize
-   the possibility of version conflicts.
-
-Historically, there have been many approaches to achieve custom installation.
-The following section lists only the easiest and most relevant approaches [1]_.
-
-`Use the "--user" option`_
-
-`Use the "--user" option and customize "PYTHONUSERBASE"`_
-
-`Use "virtualenv"`_
-
-.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by `PEP-370`_ in Python 2.6.
-
-.. _PEP-370: http://www.python.org/dev/peps/pep-0370/
-
-
-Use the "--user" option
-~~~~~~~~~~~~~~~~~~~~~~~
-With Python 2.6 came the User scheme for installation, which means that all
-python distributions support an alternative install location that is specific to a user [2]_ [3]_.
-The default location for each OS is explained in the Python documentation
-for the ``site.USER_BASE`` variable.  This mode of installation can be turned on by
-specifying the ``--user`` option to ``setup.py install`` or ``easy_install``.
-This approach serves the need to have a user-specific stash of packages.
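-
-For example, to install a package into the user site-packages and then check
-where that location actually is (``SomePackage`` is a placeholder name)::
-
-    easy_install --user SomePackage
-    python -c "import site; print site.USER_BASE"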
-
-.. [2] Prior to Python2.6, Mac OS X offered a form of the User scheme. That is now subsumed into the User scheme introduced in Python 2.6.
-.. [3] Prior to the User scheme, there was the Home scheme, which is still available, but requires more effort than the User scheme to get packages recognized.
-
-Use the "--user" option and customize "PYTHONUSERBASE"
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The User scheme install location can be customized by setting the ``PYTHONUSERBASE`` environment
-variable, which updates the value of ``site.USER_BASE``.  To isolate packages to a specific
-application, simply set ``PYTHONUSERBASE`` in that application's environment
-to a location that contains just those packages.
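-
-For example, on a Unix-like system you could give an application its own
-package area like this (the path is purely illustrative)::
-
-    PYTHONUSERBASE=/opt/myapp easy_install --user SomePackage
-    PYTHONUSERBASE=/opt/myapp python -c "import site; print site.USER_BASE"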
-
-Use "virtualenv"
-~~~~~~~~~~~~~~~~
-"virtualenv" is a 3rd-party python package that effectively "clones" a python installation, thereby
-creating an isolated location to install packages.  The evolution of "virtualenv" started before the existence
-of the User installation scheme.  "virtualenv" provides a version of ``easy_install`` that is
-scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features
-that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages.
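-
-A typical session might look like this (the environment path is only an
-example)::
-
-    virtualenv /path/to/myenv
-    /path/to/myenv/bin/easy_install SomePackage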
-
-Please refer to the `virtualenv`_ documentation for more details.
-
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
-
-
-
-Package Index "API"
--------------------
-
-Custom package indexes (and PyPI) must follow the following rules for
-EasyInstall to be able to look up and download packages:
-
-1. Except where stated otherwise, "pages" are HTML or XHTML, and "links"
-   refer to ``href`` attributes.
-
-2. Individual project version pages' URLs must be of the form
-   ``base/projectname/version``, where ``base`` is the package index's base URL.
-
-3. Omitting the ``/version`` part of a project page's URL (but keeping the
-   trailing ``/``) should result in a page that is either:
-
-   a) The single active version of that project, as though the version had been
-      explicitly included, OR
-
-   b) A page with links to all of the active version pages for that project.
-
-4. Individual project version pages should contain direct links to downloadable
-   distributions where possible.  It is explicitly permitted for a project's
-   "long_description" to include URLs, and these should be formatted as HTML
-   links by the package index, as EasyInstall does no special processing to
-   identify what parts of a page are index-specific and which are part of the
-   project's supplied description.
-
-5. Where available, MD5 information should be added to download URLs by
-   appending a fragment identifier of the form ``#md5=...``, where ``...`` is
-   the 32-character hex MD5 digest.  EasyInstall will verify that the
-   downloaded file's MD5 digest matches the given value.
-
-6. Individual project version pages should identify any "homepage" or
-   "download" URLs using ``rel="homepage"`` and ``rel="download"`` attributes
-   on the HTML elements linking to those URLs. Use of these attributes will
-   cause EasyInstall to always follow the provided links, unless it can be
-   determined by inspection that they are downloadable distributions. If the
-   links are not to downloadable distributions, they are retrieved, and if they
-   are HTML, they are scanned for download links. They are *not* scanned for
-   additional "homepage" or "download" links, as these are only processed for
-   pages that are part of a package index site.
-
-7. The root URL of the index, if retrieved with a trailing ``/``, must result
-   in a page containing links to *all* projects' active version pages.
-
-   (Note: This requirement is a workaround for the absence of case-insensitive
-   ``safe_name()`` matching of project names in URL paths. If project names are
-   matched in this fashion (e.g. via the PyPI server, mod_rewrite, or a similar
-   mechanism), then it is not necessary to include this all-packages listing
-   page.)
-
-8. If a package index is accessed via a ``file://`` URL, then EasyInstall will
-   automatically use ``index.html`` files, if present, when trying to read a
-   directory with a trailing ``/`` on the URL.
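-
-Putting rules 4 and 5 together, a minimal project version page (which would
-live at ``base/ExamplePackage/1.0``; the file name and MD5 digest below are
-placeholders) could be as simple as::
-
-    <html><body>
-    <a href="ExamplePackage-1.0.tar.gz#md5=0123456789abcdef0123456789abcdef">
-    ExamplePackage-1.0.tar.gz</a>
-    </body></html>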
-
-
-Backward Compatibility
-~~~~~~~~~~~~~~~~~~~~~~
-
-Package indexes that wish to support setuptools versions prior to 0.6b4 should
-also follow these rules:
-
-* Homepage and download links must be preceded with ``"<th>Home Page"`` or
-  ``"<th>Download URL"``, in addition to (or instead of) the ``rel=""``
-  attributes on the actual links.  These marker strings do not need to be
-  visible, or uncommented, however!  For example, the following is a valid
-  homepage link that will work with any version of setuptools::
-
-    <li>
-     <strong>Home Page:</strong>
-     <!-- <th>Home Page -->
-     <a rel="homepage" href="http://sqlobject.org">http://sqlobject.org</a>
-    </li>
-
-  Even though the marker string is in an HTML comment, older versions of
-  EasyInstall will still "see" it and know that the link that follows is the
-  project's home page URL.
-
-* The pages described by paragraph 3(b) of the preceding section *must*
-  contain the string ``"Index of Packages</title>"`` somewhere in their text.
-  This can be inside of an HTML comment, if desired, and it can be anywhere
-  in the page.  (Note: this string MUST NOT appear on normal project pages, as
-  described in paragraphs 2 and 3(a)!)
-
-In addition, for compatibility with PyPI versions that do not use ``#md5=``
-fragment IDs, EasyInstall uses the following regular expression to match PyPI's
-displayed MD5 info (broken onto two lines for readability)::
-
-    <a href="([^"#]+)">([^<]+)</a>\n\s+\(<a href="[^?]+\?:action=show_md5
-    &amp;digest=([0-9a-f]{32})">md5</a>\)
-
-History
-=======
-
-0.6c9
- * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
-   is flattened out in the resulting egg.  (There was a case-sensitivity
-   problem that affected some distributions, notably ``pywin32``.)
-
- * Prevent ``--help-commands`` and other junk from showing under Python 2.5
-   when running ``easy_install --help``.
-
- * Fixed GUI scripts sometimes not executing on Windows
-
- * Fixed not picking up dependency links from recursive dependencies.
-
- * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
-
- * Changes for Jython compatibility
-
- * Improved error message when a requirement is also a directory name, but the
-   specified directory is not a source package.
-
- * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
-
- * Fixed HTTP SVN detection failing when the page title included a project
-   name (e.g. on SourceForge-hosted SVN)
-
- * Fix Jython script installation to handle ``#!`` lines better when
-   ``sys.executable`` is a script.
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Keep site directories (e.g. ``site-packages``) from being included in
-   ``.pth`` files.
-
-0.6c7
- * ``ftp:`` download URLs now work correctly.
-
- * The default ``--index-url`` is now ``http://pypi.python.org/simple``, to use
-   the Python Package Index's new simpler (and faster!) REST API.
-
-0.6c6
- * EasyInstall no longer aborts the installation process if a URL it wants to
-   retrieve can't be downloaded, unless the URL is an actual package download.
-   Instead, it issues a warning and tries to keep going.
-
- * Fixed distutils-style scripts originally built on Windows having their line
-   endings doubled when installed on any platform.
-
- * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
-   installed using ``setup.py develop``.
-
- * Fixed not HTML-decoding URLs scraped from web pages
-
-0.6c5
- * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
-   is installed unzipped.
-
-0.6c4
- * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
-   URLs.  If a password-protected page contains links to the same host (and
-   protocol), those links will inherit the credentials used to access the
-   original page.
-
- * Removed all special support for Sourceforge mirrors, as Sourceforge's
-   mirror system now works well for non-browser downloads.
-
- * Fixed not recognizing ``win32.exe`` installers that included a custom
-   bitmap.
-
- * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
-   are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
-   is done by ``os.urandom()`` on some platforms).
-
- * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
-   has a space in it (e.g., the user installed Python to a ``Program Files``
-   directory).
-
-0.6c3
- * You can once again use "python -m easy_install" with Python 2.4 and above.
-
- * Python 2.5 compatibility fixes added.
-
-0.6c2
- * Windows script wrappers now support quoted arguments and arguments
-   containing spaces.  (Patch contributed by Jim Fulton.)
-
- * The ``ez_setup.py`` script now actually works when you put a setuptools
-   ``.egg`` alongside it for bootstrapping an offline machine.
-
- * A writable installation directory on ``sys.path`` is no longer required to
-   download and extract a source distribution using ``--editable``.
-
- * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable``
-   contains non-ASCII characters, to prevent deprecation warnings about an
-   unspecified encoding when the script is run.
-
-0.6c1
- * EasyInstall now includes setuptools version information in the
-   ``User-Agent`` string sent to websites it visits.
-
-0.6b4
- * Fix creating Python wrappers for non-Python scripts
-
- * Fix ``ftp://`` directory listing URLs from causing a crash when used in the
-   "Home page" or "Download URL" slots on PyPI.
-
- * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile
-   or directory is deleted/overwritten.
-
- * Fix not recognizing HTML 404 pages from package indexes.
-
- * Allow ``file://`` URLs to be used as a package index.  URLs that refer to
-   directories will use an internally-generated directory listing if there is
-   no ``index.html`` file in the directory.
-
- * Allow external links in a package index to be specified using
-   ``rel="homepage"`` or ``rel="download"``, without needing the old
-   PyPI-specific visible markup.
-
- * Suppressed warning message about possibly-misspelled project name, if an egg
-   or link for that project name has already been seen.
-
-0.6b3
- * Fix local ``--find-links`` eggs not being copied except with
-   ``--always-copy``.
-
- * Fix sometimes not detecting local packages installed outside of "site"
-   directories.
-
- * Fix mysterious errors during initial ``setuptools`` install, caused by
-   ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru
-   after deleting the egg from which it's running.
-
-0.6b2
- * Don't install or update a ``site.py`` patch when installing to a
-   ``PYTHONPATH`` directory with ``--multi-version``, unless an
-   ``easy-install.pth`` file is already in use there.
-
- * Construct ``.pth`` file paths in such a way that installing an egg whose
-   name begins with ``import`` doesn't cause a syntax error.
-
- * Fixed a bogus warning message that wasn't updated since the 0.5 versions.
-
-0.6b1
- * Better ambiguity management: accept ``#egg`` name/version even if processing
-   what appears to be a correctly-named distutils file, and ignore ``.egg``
-   files with no ``-``, since valid Python ``.egg`` files always have a version
-   number (but Scheme eggs often don't).
-
- * Support ``file://`` links to directories in ``--find-links``, so that
-   easy_install can build packages from local source checkouts.
-
- * Added automatic retry for Sourceforge mirrors.  The new download process is
-   to first just try dl.sourceforge.net, then randomly select mirror IPs and
-   remove ones that fail, until something works.  The removed IPs stay removed
-   for the remainder of the run.
-
- * Ignore bdist_dumb distributions when looking at download URLs.
-
-0.6a11
- * Process ``dependency_links.txt`` if found in a distribution, by adding the
-   URLs to the list for scanning.
-
- * Use relative paths in ``.pth`` files when eggs are being installed to the
-   same directory as the ``.pth`` file.  This maximizes portability of the
-   target directory when building applications that contain eggs.
-
- * Added ``easy_install-N.N`` script(s) for convenience when using multiple
-   Python versions.
-
- * Added automatic handling of installation conflicts.  Eggs are now shifted to
-   the front of sys.path, in an order consistent with where they came from,
-   making EasyInstall seamlessly co-operate with system package managers.
-
-   The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options
-   are now no longer necessary, and will generate warnings at the end of a
-   run if you use them.
-
- * Don't recursively traverse subdirectories given to ``--find-links``.
-
-0.6a10
- * Added exhaustive testing of the install directory, including a spawn test
-   for ``.pth`` file support, and directory writability/existence checks.  This
-   should virtually eliminate the need to set or configure ``--site-dirs``.
-
- * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of
-   RTFM-ing.  :)
-
- * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it
-   manually to make it work.  ``--multi-version`` is no longer a silent
-   default; you must explicitly use it if installing to a non-PYTHONPATH,
-   non-"site" directory.
-
- * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``,
-   ``--install-dir``, and ``--script-dir`` options, whether on the command line
-   or in configuration files.
-
- * Improved SourceForge mirror processing to work faster and be less affected
-   by transient HTML changes made by SourceForge.
-
- * PyPI searches now use the exact spelling of requirements specified on the
-   command line or in a project's ``install_requires``.  Previously, a
-   normalized form of the name was used, which could lead to unnecessary
-   full-index searches when a project's name had an underscore (``_``) in it.
-
- * EasyInstall can now download bare ``.py`` files and wrap them in an egg,
-   as long as you include an ``#egg=name-version`` suffix on the URL, or if
-   the ``.py`` file is listed as the "Download URL" on the project's PyPI page.
-   This allows third parties to "package" trivial Python modules just by
-   linking to them (e.g. from within their own PyPI page or download links
-   page).
-
- * The ``--always-copy`` option now skips "system" and "development" eggs since
-   they can't be reliably copied.  Note that this may cause EasyInstall to
-   choose an older version of a package than what you expected, or it may cause
-   downloading and installation of a fresh version of what's already installed.
-
- * The ``--find-links`` option previously scanned all supplied URLs and
-   directories as early as possible, but now only directories and direct
-   archive links are scanned immediately.  URLs are not retrieved unless a
-   package search was already going to go online due to a package not being
-   available locally, or due to the use of the ``--update`` or ``-U`` option.
-
- * Fixed the annoying ``--help-commands`` wart.
-
-0.6a9
- * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside
-   "baskets") when they weren't explicitly listed in the ``.pth`` file.
-
- * If more than one URL appears to describe the exact same distribution, prefer
-   the shortest one.  This helps to avoid "table of contents" CGI URLs like the
-   ones on effbot.org.
-
- * Quote arguments to python.exe (including python's path) to avoid problems
-   when Python (or a script) is installed in a directory whose name contains
-   spaces on Windows.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
-   format.  Running ``bdist_wininst`` on a setuptools-based package wraps the
-   egg in an .exe that will safely install it as an egg (i.e., with metadata
-   and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
-   back into an ``.egg`` file or directory and install it as such.
-
-0.6a8
- * Update for changed SourceForge mirror format
-
- * Fixed not installing dependencies for some packages fetched via Subversion
-
- * Fixed dependency installation with ``--always-copy`` not using the same
-   dependency resolution procedure as other operations.
-
- * Fixed not fully removing temporary directories on Windows, if a Subversion
-   checkout left read-only files behind
-
- * Fixed some problems building extensions when Pyrex was installed, especially
-   with Python 2.4 and/or packages using SWIG.
-
-0.6a7
- * Fixed not being able to install Windows script wrappers using Python 2.3
-
-0.6a6
- * Added support for "traditional" PYTHONPATH-based non-root installation, and
-   also the convenient ``virtual-python.py`` script, based on a contribution
-   by Ian Bicking.  The setuptools egg now contains a hacked ``site`` module
-   that makes the PYTHONPATH-based approach work with .pth files, so that you
-   can get the full EasyInstall feature set on such installations.
-
- * Added ``--no-deps`` and ``--allow-hosts`` options.
-
- * Improved Windows ``.exe`` script wrappers so that the script can have the
-   same name as a module without confusing Python.
-
- * Changed dependency processing so that it's breadth-first, allowing a
-   depender's preferences to override those of a dependee, to prevent conflicts
-   when a lower version is acceptable to the dependee, but not the depender.
-   Also, ensure that currently installed/selected packages aren't given
-   precedence over ones desired by a package being installed, which could
-   cause conflict errors.
-
-0.6a3
- * Improved error message when trying to use old ways of running
-   ``easy_install``.  Removed the ability to run via ``python -m`` or by
-   running ``easy_install.py``; ``easy_install`` is the command to run on all
-   supported platforms.
-
- * Improved wrapper script generation and runtime initialization so that a
-   VersionConflict doesn't occur if you later install a competing version of a
-   needed package as the default version of that package.
-
- * Fixed a problem parsing version numbers in ``#egg=`` links.
-
-0.6a2
- * EasyInstall can now install "console_scripts" defined by packages that use
-   ``setuptools`` and define appropriate entry points.  On Windows, console
-   scripts get an ``.exe`` wrapper so you can just type their name.  On other
-   platforms, the scripts are installed without a file extension.
-
- * Using ``python -m easy_install`` or running ``easy_install.py`` is now
-   DEPRECATED, since an ``easy_install`` wrapper is now available on all
-   platforms.
-
-0.6a1
- * EasyInstall now does MD5 validation of downloads from PyPI, or from any link
-   that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest.
-
- * EasyInstall now handles symlinks in target directories by removing the link,
-   rather than attempting to overwrite the link's destination.  This makes it
-   easier to set up an alternate Python "home" directory (as described above in
-   the `Non-Root Installation`_ section).
-
- * Added support for handling MacOS platform information in ``.egg`` filenames,
-   based on a contribution by Kevin Dangoor.  You may wish to delete and
-   reinstall any eggs whose filename includes "darwin" and "Power_Macintosh",
-   because the format for this platform information has changed so that minor
-   OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a
-   previous OS version to become obsolete.
-
- * easy_install's dependency processing algorithms have changed.  When using
-   ``--always-copy``, it now ensures that dependencies are copied too.  When
-   not using ``--always-copy``, it tries to use a single resolution loop,
-   rather than recursing.
-
- * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py``
-   extensions.
-
- * Added ``--site-dirs`` option to allow adding custom "site" directories.
-   Made ``easy-install.pth`` work in platform-specific alternate site
-   directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X).
-
- * If you manually delete the current version of a package, the next run of
-   EasyInstall against the target directory will now remove the stray entry
-   from the ``easy-install.pth`` file.
-
- * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID
-   as pointing to the named project's source checkout.  Such URLs have a lower
-   match precedence than any other kind of distribution, so they'll only be
-   used if they have a higher version number than any other available
-   distribution, or if you use the ``--editable`` option.  The ``#egg``
-   fragment can contain a version if it's formatted as ``#egg=proj-ver``,
-   where ``proj`` is the project name, and ``ver`` is the version number.  You
-   *must* use the format for these values that the ``bdist_egg`` command uses;
-   i.e., all non-alphanumeric runs must be condensed to single underscore
-   characters.
-
- * Added the ``--editable`` option; see `Editing and Viewing Source Packages`_
-   above for more info.  Also, slightly changed the behavior of the
-   ``--build-directory`` option.
-
- * Fixed the setup script sandbox facility not recognizing certain paths as
-   valid on case-insensitive platforms.
-
-0.5a12
- * Fix ``python -m easy_install`` not working due to setuptools being installed
-   as a zipfile.  Update safety scanner to check for modules that might be used
-   as ``python -m`` scripts.
-
- * Misc. fixes for win32.exe support, including changes to support Python 2.4's
-   changed ``bdist_wininst`` format.
-
-0.5a10
- * Put the ``easy_install`` module back in as a module, as it's needed for
-   ``python -m`` to run it!
-
- * Allow ``--find-links/-f`` to accept local directories or filenames as well
-   as URLs.
-
-0.5a9
- * EasyInstall now automatically detects when an "unmanaged" package or
-   module is going to be on ``sys.path`` ahead of a package you're installing,
-   thereby preventing the newer version from being imported.  By default, it
-   will abort installation to alert you of the problem, but there are also
-   new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``)
-   available to change the default behavior.  (Note: this new feature doesn't
-   take effect for egg files that were built with older ``setuptools``
-   versions, because they lack the new metadata file required to implement it.)
-
- * The ``easy_install`` distutils command now uses ``DistutilsError`` as its
-   base error type for errors that should just issue a message to stderr and
-   exit the program without a traceback.
-
- * EasyInstall can now be given a path to a directory containing a setup
-   script, and it will attempt to build and install the package there.
-
- * EasyInstall now performs a safety analysis on module contents to determine
-   whether a package is likely to run in zipped form, and displays
-   information about what modules may be doing introspection that would break
-   when running as a zipfile.
-
- * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that
-   would ordinarily be considered safe to unzip, and changed the meaning of
-   ``--zip-ok/-z`` to "always leave everything zipped".
-
-0.5a8
- * There is now a separate documentation page for `setuptools`_; revision
-   history that's not specific to EasyInstall has been moved to that page.
-
- .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
-
-0.5a5
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
-   the ``easy_install`` module to ``setuptools.command.easy_install``.  Note
-   that if you were importing or extending it, you must now change your imports
-   accordingly.  ``easy_install.py`` is still installed as a script, but not as
-   a module.
-
-0.5a4
- * Added ``--always-copy/-a`` option to always copy needed packages to the
-   installation directory, even if they're already present elsewhere on
-   sys.path. (In previous versions, this was the default behavior, but now
-   you must request it.)
-
- * Added ``--upgrade/-U`` option to force checking PyPI for latest available
-   version(s) of all packages requested by name and version, even if a matching
-   version is available locally.
-
- * Added automatic installation of dependencies declared by a distribution
-   being installed.  These dependencies must be listed in the distribution's
-   ``EGG-INFO`` directory, so the distribution has to have declared its
-   dependencies by using setuptools.  If a package has requirements it didn't
-   declare, you'll still have to deal with them yourself.  (E.g., by asking
-   EasyInstall to find and install them.)
-
- * Added the ``--record`` option to ``easy_install`` for the benefit of tools
-   that run ``setup.py install --record=filename`` on behalf of another
-   packaging system.
-
-0.5a3
- * Fixed not setting script permissions to allow execution.
-
- * Improved sandboxing so that setup scripts that want a temporary directory
-   (e.g. pychecker) can still run in the sandbox.
-
-0.5a2
- * Fix stupid stupid refactoring-at-the-last-minute typos.  :(
-
-0.5a1
- * Added support for converting ``.win32.exe`` installers to eggs on the fly.
-   EasyInstall will now recognize such files by name and install them.
-
- * Fixed a problem with picking the "best" version to install (versions were
-   being sorted as strings, rather than as parsed values).
-
-0.4a4
- * Added support for the distutils "verbose/quiet" and "dry-run" options, as
-   well as the "optimize" flag.
-
- * Support downloading packages that were uploaded to PyPI (by scanning all
-   links on package pages, not just the homepage/download links).
-
-0.4a3
- * Add progress messages to the search/download process so that you can tell
-   what URLs it's reading to find download links.  (Hopefully, this will help
-   people report out-of-date and broken links to package authors, and to tell
-   when they've asked for a package that doesn't exist.)
-
-0.4a2
- * Added support for installing scripts
-
- * Added support for setting options via distutils configuration files, and
-   using distutils' default options as a basis for EasyInstall's defaults.
-
- * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the
-   script installation directory option.
-
- * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if
-   Python includes SSL support.
-
-0.4a1
- * Added ``--scan-url`` and ``--index-url`` options, to scan download pages
-   and search PyPI for needed packages.
-
-0.3a4
- * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single
-   URL installs, to avoid running the wrong setup.py.
-
-0.3a3
- * Added ``--build-directory=DIR/-b DIR`` option.
-
- * Added "installation report" that explains how to use 'require()' when doing
-   a multiversion install or alternate installation directory.
-
- * Added SourceForge mirror auto-select (Contributed by Ian Bicking)
-
- * Added "sandboxing" that stops a setup script from running if it attempts to
-   write to the filesystem outside of the build area
-
- * Added more workarounds for packages with quirky ``install_data`` hacks
-
-0.3a2
- * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as
-   automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking)
-
- * Misc. bug fixes
-
-0.3a1
- * Initial release.
-
-
-Future Plans
-============
-
-* Additional utilities to list/remove/verify packages
-* Signature checking?  SSL?  Ability to suppress PyPI search?
-* Display byte progress meter when downloading distributions and long pages?
-* Redirect stdout/stderr to log during run_setup?
-
diff --git a/vendor/distribute-0.6.34/docs/index.txt b/vendor/distribute-0.6.34/docs/index.txt
deleted file mode 100644
index 5f3b945b200a8a0504d65a1aaf28892d0243a037..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/index.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-Welcome to Distribute's documentation!
-======================================
-
-`Distribute` is a fork of the `Setuptools` project.
-
-Distribute is intended to replace Setuptools as the standard method for
-working with Python module distributions.
-
-For those who may wonder why they should switch to Distribute over Setuptools, it's quite simple:
-
-- Distribute is a drop-in replacement for Setuptools
-- The code is actively maintained, and has over 10 committers
-- Distribute offers Python 3 support!
-
-Documentation content:
-
-.. toctree::
-   :maxdepth: 2
-
-   roadmap
-   python3
-   using
-   setuptools
-   easy_install
-   pkg_resources
-
-
-.. image:: http://python-distribute.org/pip_distribute.png
-
-Design done by Idan Gazit (http://pixane.com) - License: cc-by-3.0
-
-Copy & paste::
-
- curl -O http://python-distribute.org/distribute_setup.py
- python distribute_setup.py
- easy_install pip
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/docs/pkg_resources.txt b/vendor/distribute-0.6.34/docs/pkg_resources.txt
deleted file mode 100644
index 480f9547ceb1fdf60c55e2e12a393a49ca84207f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/pkg_resources.txt
+++ /dev/null
@@ -1,1955 +0,0 @@
-=============================================================
-Package Discovery and Resource Access using ``pkg_resources``
-=============================================================
-
-The ``pkg_resources`` module distributed with ``setuptools`` provides an API
-for Python libraries to access their resource files, and for extensible
-applications and frameworks to automatically discover plugins.  It also
-provides runtime support for using C extensions that are inside zipfile-format
-eggs, support for merging packages that have separately-distributed modules or
-subpackages, and APIs for managing Python's current "working set" of active
-packages.
-
-
-.. contents:: **Table of Contents**
-
-
---------
-Overview
---------
-
-Eggs are a distribution format for Python modules, similar in concept to Java's
-"jars" or Ruby's "gems".  They differ from previous Python distribution formats
-in that they are importable (i.e. they can be added to ``sys.path``), and they
-are *discoverable*, meaning that they carry metadata that unambiguously
-identifies their contents and dependencies, and thus can be *automatically*
-found and added to ``sys.path`` in response to simple requests of the form,
-"get me everything I need to use docutils' PDF support".
-
-The ``pkg_resources`` module provides runtime facilities for finding,
-introspecting, activating and using eggs and other "pluggable" distribution
-formats.  Because these are new concepts in Python (and not that
-well-established in other languages either), it helps to have a few special
-terms for talking about eggs and how they can be used:
-
-project
-    A library, framework, script, plugin, application, or collection of data
-    or other resources, or some combination thereof.  Projects are assumed to
-    have "relatively unique" names, e.g. names registered with PyPI.
-
-release
-    A snapshot of a project at a particular point in time, denoted by a version
-    identifier.
-
-distribution
-    A file or files that represent a particular release.
-
-importable distribution
-    A file or directory that, if placed on ``sys.path``, allows Python to
-    import any modules contained within it.
-
-pluggable distribution
-    An importable distribution whose filename unambiguously identifies its
-    release (i.e. project and version), and whose contents unambiguously
-    specify what releases of other projects will satisfy its runtime
-    requirements.
-
-extra
-    An "extra" is an optional feature of a release, that may impose additional
-    runtime requirements.  For example, if docutils PDF support required a
-    PDF support library to be present, docutils could define its PDF support as
-    an "extra", and list what other project releases need to be available in
-    order to provide it.
-
-environment
-    A collection of distributions potentially available for importing, but not
-    necessarily active.  More than one distribution (i.e. release version) for
-    a given project may be present in an environment.
-
-working set
-    A collection of distributions actually available for importing, as on
-    ``sys.path``.  At most one distribution (release version) of a given
-    project may be present in a working set, as otherwise there would be
-    ambiguity as to what to import.
-
-eggs
-    Eggs are pluggable distributions in one of the three formats currently
-    supported by ``pkg_resources``.  There are built eggs, development eggs,
-    and egg links.  Built eggs are directories or zipfiles whose name ends
-    with ``.egg`` and follows the egg naming conventions, and contain an
-    ``EGG-INFO`` subdirectory (zipped or otherwise).  Development eggs are
-    normal directories of Python code with one or more ``ProjectName.egg-info``
-    subdirectories.  And egg links are ``*.egg-link`` files that contain the
-    name of a built or development egg, to support symbolic linking on
-    platforms that do not have native symbolic links.
-
-(For more information about these terms and concepts, see also this
-`architectural overview`_ of ``pkg_resources`` and Python Eggs in general.)
-
-.. _architectural overview: http://mail.python.org/pipermail/distutils-sig/2005-June/004652.html
-
-
-.. -----------------
-.. Developer's Guide
-.. -----------------
-
-.. This section isn't written yet.  Currently planned topics include
-    Accessing Resources
-    Finding and Activating Package Distributions
-        get_provider()
-        require()
-        WorkingSet
-        iter_distributions
-    Running Scripts
-    Configuration
-    Namespace Packages
-    Extensible Applications and Frameworks
-        Locating entry points
-        Activation listeners
-        Metadata access
-        Extended Discovery and Installation
-    Supporting Custom PEP 302 Implementations
-.. For now, please check out the extensive `API Reference`_ below.
-
-
--------------
-API Reference
--------------
-
-Namespace Package Support
-=========================
-
-A namespace package is a package that only contains other packages and modules,
-with no direct contents of its own.  Such packages can be split across
-multiple, separately-packaged distributions.  Normally, you do not need to use
-the namespace package APIs directly; instead you should supply the
-``namespace_packages`` argument to ``setup()`` in your project's ``setup.py``.
-See the `setuptools documentation on namespace packages`_ for more information.
-
-However, if for some reason you need to manipulate namespace packages or
-directly alter ``sys.path`` at runtime, you may find these APIs useful:
-
-``declare_namespace(name)``
-    Declare that the dotted package name `name` is a "namespace package" whose
-    contained packages and modules may be spread across multiple distributions.
-    The named package's ``__path__`` will be extended to include the
-    corresponding package in all distributions on ``sys.path`` that contain a
-    package of that name.  (More precisely, if an importer's
-    ``find_module(name)`` returns a loader, then it will also be searched for
-    the package's contents.)  Whenever a Distribution's ``activate()`` method
-    is invoked, it checks for the presence of namespace packages and updates
-    their ``__path__`` contents accordingly.
-
-Applications that manipulate namespace packages or directly alter ``sys.path``
-at runtime may also need to use this API function:
-
-``fixup_namespace_packages(path_item)``
-    Declare that `path_item` is a newly added item on ``sys.path`` that may
-    need to be used to update existing namespace packages.  Ordinarily, this is
-    called for you when an egg is automatically added to ``sys.path``, but if
-    your application modifies ``sys.path`` to include locations that may
-    contain portions of a namespace package, you will need to call this
-    function to ensure they are added to the existing namespace packages.
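-
-For example, an application that adds a plugin directory to ``sys.path`` at
-runtime might do something like the following minimal sketch (the directory
-name is hypothetical)::
-
-    import sys
-    import pkg_resources
-
-    plugin_dir = '/opt/myapp/plugins'   # hypothetical location
-    sys.path.append(plugin_dir)
-
-    # Let pkg_resources merge any namespace-package portions found on the
-    # new path entry into the already-declared namespace packages.
-    pkg_resources.fixup_namespace_packages(plugin_dir)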
-
-Although by default ``pkg_resources`` only supports namespace packages for
-filesystem and zip importers, you can extend its support to other "importers"
-compatible with PEP 302 using the ``register_namespace_handler()`` function.
-See the section below on `Supporting Custom Importers`_ for details.
-
-.. _setuptools documentation on namespace packages: http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
-
-
-``WorkingSet`` Objects
-======================
-
-The ``WorkingSet`` class provides access to a collection of "active"
-distributions.  In general, there is only one meaningful ``WorkingSet``
-instance: the one that represents the distributions that are currently active
-on ``sys.path``.  This global instance is available under the name
-``working_set`` in the ``pkg_resources`` module.  However, specialized
-tools may wish to manipulate working sets that don't correspond to
-``sys.path``, and therefore may wish to create other ``WorkingSet`` instances.
-
-It's important to note that the global ``working_set`` object is initialized
-from ``sys.path`` when ``pkg_resources`` is first imported, but is only updated
-if you do all future ``sys.path`` manipulation via ``pkg_resources`` APIs.  If
-you manually modify ``sys.path``, you must invoke the appropriate methods on
-the ``working_set`` instance to keep it in sync.  Unfortunately, Python does
-not provide any way to detect arbitrary changes to a list object like
-``sys.path``, so ``pkg_resources`` cannot automatically update the
-``working_set`` based on changes to ``sys.path``.
-
-``WorkingSet(entries=None)``
-    Create a ``WorkingSet`` from an iterable of path entries.  If `entries`
-    is not supplied, it defaults to the value of ``sys.path`` at the time
-    the constructor is called.
-
-    Note that you will not normally construct ``WorkingSet`` instances
-    yourself, but instead you will implicitly or explicitly use the global
-    ``working_set`` instance.  For the most part, the ``pkg_resources`` API
-    is designed so that the ``working_set`` is used by default, such that you
-    don't have to explicitly refer to it most of the time.
-
-
-Basic ``WorkingSet`` Methods
-----------------------------
-
-The following methods of ``WorkingSet`` objects are also available as
-module-level functions in ``pkg_resources`` that apply to the default
-``working_set`` instance.  Thus, you can use e.g. ``pkg_resources.require()``
-as an abbreviation for ``pkg_resources.working_set.require()``:
-
-
-``require(*requirements)``
-    Ensure that distributions matching `requirements` are activated
-
-    `requirements` must be a string or a (possibly-nested) sequence
-    thereof, specifying the distributions and versions required.  The
-    return value is a sequence of the distributions that needed to be
-    activated to fulfill the requirements; all relevant distributions are
-    included, even if they were already activated in this working set.
-
-    For the syntax of requirement specifiers, see the section below on
-    `Requirements Parsing`_.
-
-    In general, it should not be necessary for you to call this method
-    directly.  It's intended more for use in quick-and-dirty scripting and
-    interactive interpreter hacking than for production use.  If you're
-    creating an actual library or application, it's strongly recommended that
-    you create a "setup.py" script using ``setuptools``, and declare all your
-    requirements there.  That way, tools like EasyInstall can automatically
-    detect what requirements your package has, and deal with them accordingly.
-    (A short sketch of ``require()`` follows this list.)
-
-    Note that calling ``require('SomePackage')`` will not install
-    ``SomePackage`` if it isn't already present.  If you need to do this, you
-    should use the ``resolve()`` method instead, which allows you to pass an
-    ``installer`` callback that will be invoked when a needed distribution
-    can't be found on the local machine.  You can then have this callback
-    display a dialog, automatically download the needed distribution, or
-    whatever else is appropriate for your application. See the documentation
-    below on the ``resolve()`` method for more information, and also on the
-    ``obtain()`` method of ``Environment`` objects.
-
-``run_script(requires, script_name)``
-    Locate distribution specified by `requires` and run its `script_name`
-    script.  `requires` must be a string containing a requirement specifier.
-    (See `Requirements Parsing`_ below for the syntax.)
-
-    The script, if found, will be executed in *the caller's globals*.  That's
-    because this method is intended to be called from wrapper scripts that
-    act as a proxy for the "real" scripts in a distribution.  A wrapper script
-    usually doesn't need to do anything but invoke this function with the
-    correct arguments.
-
-    If you need more control over the script execution environment, you
-    probably want to use the ``run_script()`` method of a ``Distribution``
-    object's `Metadata API`_ instead.
-
-``iter_entry_points(group, name=None)``
-    Yield entry point objects from `group` matching `name`
-
-    If `name` is None, yields all entry points in `group` from all
-    distributions in the working set, otherwise only ones matching both
-    `group` and `name` are yielded.  Entry points are yielded from the active
-    distributions in the order that the distributions appear in the working
-    set.  (For the global ``working_set``, this should be the same as the order
-    that they are listed in ``sys.path``.)  Note that within the entry points
-    advertised by an individual distribution, there is no particular ordering.
-
-    Please see the section below on `Entry Points`_ for more information.
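-
-A short sketch of ``require()``, as promised above; the project name is
-hypothetical, and nothing is downloaded if the distribution is not already
-installed::
-
-    import pkg_resources
-
-    # Activate FooProject (and anything it requires) on sys.path, or raise
-    # DistributionNotFound / VersionConflict if that isn't possible.
-    for dist in pkg_resources.require('FooProject>=1.2'):
-        print(dist)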
-
-
-``WorkingSet`` Methods and Attributes
--------------------------------------
-
-These methods are used to query or manipulate the contents of a specific
-working set, so they must be explicitly invoked on a particular ``WorkingSet``
-instance (a combined usage sketch follows this list):
-
-``add_entry(entry)``
-    Add a path item to the ``entries``, finding any distributions on it.  You
-    should use this when you add additional items to ``sys.path`` and you want
-    the global ``working_set`` to reflect the change.  This method is also
-    called by the ``WorkingSet()`` constructor during initialization.
-
-    This method uses ``find_distributions(entry,True)`` to find distributions
-    corresponding to the path entry, and then ``add()`` them.  `entry` is
-    always appended to the ``entries`` attribute, even if it is already
-    present, however. (This is because ``sys.path`` can contain the same value
-    more than once, and the ``entries`` attribute should be able to reflect
-    this.)
-
-``__contains__(dist)``
-    True if `dist` is active in this ``WorkingSet``.  Note that only one
-    distribution for a given project can be active in a given ``WorkingSet``.
-
-``__iter__()``
-    Yield distributions for non-duplicate projects in the working set.
-    The yield order is the order in which the items' path entries were
-    added to the working set.
-
-``find(req)``
-    Find a distribution matching `req` (a ``Requirement`` instance).
-    If there is an active distribution for the requested project, this
-    returns it, as long as it meets the version requirement specified by
-    `req`.  But, if there is an active distribution for the project and it
-    does *not* meet the `req` requirement, ``VersionConflict`` is raised.
-    If there is no active distribution for the requested project, ``None``
-    is returned.
-
-``resolve(requirements, env=None, installer=None)``
-    List all distributions needed to (recursively) meet `requirements`
-
-    `requirements` must be a sequence of ``Requirement`` objects.  `env`,
-    if supplied, should be an ``Environment`` instance.  If
-    not supplied, an ``Environment`` is created from the working set's
-    ``entries``.  `installer`, if supplied, will be invoked with each
-    requirement that cannot be met by an already-installed distribution; it
-    should return a ``Distribution`` or ``None``.  (See the ``obtain()`` method
-    of `Environment Objects`_, below, for more information on the `installer`
-    argument.)
-
-``add(dist, entry=None)``
-    Add `dist` to working set, associated with `entry`
-
-    If `entry` is unspecified, it defaults to ``dist.location``.  On exit from
-    this routine, `entry` is added to the end of the working set's ``.entries``
-    (if it wasn't already present).
-
-    `dist` is only added to the working set if it's for a project that
-    doesn't already have a distribution active in the set.  If it's
-    successfully added, any callbacks registered with the ``subscribe()``
-    method will be called.  (See `Receiving Change Notifications`_, below.)
-
-    Note: ``add()`` is automatically called for you by the ``require()``
-    method, so you don't normally need to use this method directly.
-
-``entries``
-    This attribute represents a "shadow" ``sys.path``, primarily useful for
-    debugging.  If you are experiencing import problems, you should check
-    the global ``working_set`` object's ``entries`` against ``sys.path``, to
-    ensure that they match.  If they do not, then some part of your program
-    is manipulating ``sys.path`` without updating the ``working_set``
-    accordingly.  IMPORTANT NOTE: do not directly manipulate this attribute!
-    Setting it equal to ``sys.path`` will not fix your problem, any more than
-    putting black tape over an "engine warning" light will fix your car!  If
-    this attribute is out of sync with ``sys.path``, it's merely an *indicator*
-    of the problem, not the cause of it.
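-
-The combined sketch mentioned above builds a private working set and resolves
-a requirement against it (the plugin path and project name are hypothetical)::
-
-    import sys
-    from pkg_resources import WorkingSet, Requirement
-
-    # a working set seeded from a plugin directory plus the normal sys.path
-    ws = WorkingSet(['/opt/myapp/plugins'] + sys.path)
-
-    req = Requirement.parse('FooProject>=1.2')
-
-    # find() returns the active distribution if it satisfies the requirement,
-    # None if the project isn't active, or raises VersionConflict otherwise.
-    dist = ws.find(req)
-    if dist is None:
-        # resolve() lists everything needed to satisfy the requirement;
-        # add() activates each of those distributions in this working set.
-        for needed in ws.resolve([req]):
-            ws.add(needed)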
-
-
-Receiving Change Notifications
-------------------------------
-
-Extensible applications and frameworks may need to receive notification when
-a new distribution (such as a plug-in component) has been added to a working
-set.  This is what the ``subscribe()`` method and ``add_activation_listener()``
-function are for.
-
-``subscribe(callback)``
-    Invoke ``callback(distribution)`` once for each active distribution that is
-    in the set now, or gets added later.  Because the callback is invoked for
-    already-active distributions, you do not need to loop over the working set
-    yourself to deal with the existing items; just register the callback and
-    be prepared for the fact that it will be called immediately by this method.
-
-    Note that callbacks *must not* allow exceptions to propagate, or they will
-    interfere with the operation of other callbacks and possibly result in an
-    inconsistent working set state.  Callbacks should use a try/except block
-    to ignore, log, or otherwise process any errors, especially since the code
-    that caused the callback to be invoked is unlikely to be able to handle
-    the errors any better than the callback itself.
-
-``pkg_resources.add_activation_listener()`` is an alternate spelling of
-``pkg_resources.working_set.subscribe()``.
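-
-A minimal sketch of an activation listener; the logging is purely
-illustrative::
-
-    import pkg_resources
-
-    def on_activation(dist):
-        # Called once for each distribution already active in the working
-        # set, and again for each one added later.  Never let exceptions
-        # escape from a callback.
-        try:
-            print('activated %s %s' % (dist.project_name, dist.version))
-        except Exception:
-            pass
-
-    pkg_resources.add_activation_listener(on_activation)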
-
-
-Locating Plugins
-----------------
-
-Extensible applications will sometimes have a "plugin directory" or a set of
-plugin directories, from which they want to load entry points or other
-metadata.  The ``find_plugins()`` method allows you to do this, by scanning an
-environment for the newest version of each project that can be safely loaded
-without conflicts or missing requirements.
-
-``find_plugins(plugin_env, full_env=None, fallback=True)``
-   Scan `plugin_env` and identify which distributions could be added to this
-   working set without version conflicts or missing requirements.
-
-   Example usage::
-
-       distributions, errors = working_set.find_plugins(
-           Environment(plugin_dirlist)
-       )
-       map(working_set.add, distributions)  # add plugins+libs to sys.path
-       print "Couldn't load", errors        # display errors
-
-   The `plugin_env` should be an ``Environment`` instance that contains only
-   distributions that are in the project's "plugin directory" or directories.
-   The `full_env`, if supplied, should be an ``Environment`` instance that
-   contains all currently-available distributions.
-
-   If `full_env` is not supplied, one is created automatically from the
-   ``WorkingSet`` this method is called on, which will typically mean that
-   every directory on ``sys.path`` will be scanned for distributions.
-
-   This method returns a 2-tuple: (`distributions`, `error_info`), where
-   `distributions` is a list of the distributions found in `plugin_env` that
-   were loadable, along with any other distributions that are needed to resolve
-   their dependencies.  `error_info` is a dictionary mapping unloadable plugin
-   distributions to an exception instance describing the error that occurred.
-   Usually this will be a ``DistributionNotFound`` or ``VersionConflict``
-   instance.
-
-   Most applications will use this method mainly on the master ``working_set``
-   instance in ``pkg_resources``, and then immediately add the returned
-   distributions to the working set so that they are available on sys.path.
-   This will make it possible to find any entry points, and allow any other
-   metadata tracking and hooks to be activated.
-
-   The resolution algorithm used by ``find_plugins()`` is as follows.  First,
-   the project names of the distributions present in `plugin_env` are sorted.
-   Then, each project's eggs are tried in descending version order (i.e.,
-   newest version first).
-
-   An attempt is made to resolve each egg's dependencies. If the attempt is
-   successful, the egg and its dependencies are added to the output list and to
-   a temporary copy of the working set.  The resolution process continues with
-   the next project name, and no older eggs for that project are tried.
-
-   If the resolution attempt fails, however, the error is added to the error
-   dictionary.  If the `fallback` flag is true, the next older version of the
-   plugin is tried, until a working version is found.  If false, the resolution
-   process continues with the next plugin project name.
-
-   Some applications may have stricter fallback requirements than others. For
-   example, an application that has a database schema or persistent objects
-   may not be able to safely downgrade a version of a package. Others may want
-   to ensure that a new plugin configuration is either 100% good or else
-   revert to a known-good configuration.  (That is, they may wish to revert to
-   a known configuration if the `error_info` return value is non-empty.)
-
-   Note that this algorithm gives precedence to satisfying the dependencies of
-   alphabetically prior project names in case of version conflicts. If two
-   projects named "AaronsPlugin" and "ZekesPlugin" both need different versions
-   of "TomsLibrary", then "AaronsPlugin" will win and "ZekesPlugin" will be
-   disabled due to version conflict.
-
-
-``Environment`` Objects
-=======================
-
-An "environment" is a collection of ``Distribution`` objects, usually ones
-that are present and potentially importable on the current platform.
-``Environment`` objects are used by ``pkg_resources`` to index available
-distributions during dependency resolution.  (A short usage sketch follows
-the reference entries below.)
-
-``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)``
-    Create an environment snapshot by scanning `search_path` for distributions
-    compatible with `platform` and `python`.  `search_path` should be a
-    sequence of strings such as might be used on ``sys.path``.  If a
-    `search_path` isn't supplied, ``sys.path`` is used.
-
-    `platform` is an optional string specifying the name of the platform
-    that platform-specific distributions must be compatible with.  If
-    unspecified, it defaults to the current platform.  `python` is an
-    optional string naming the desired version of Python (e.g. ``'2.4'``);
-    it defaults to the currently-running version.
-
-    You may explicitly set `platform` (and/or `python`) to ``None`` if you
-    wish to include *all* distributions, not just those compatible with the
-    running platform or Python version.
-
-    Note that `search_path` is scanned immediately for distributions, and the
-    resulting ``Environment`` is a snapshot of the found distributions.  It
-    is not automatically updated if the system's state changes due to e.g.
-    installation or removal of distributions.
-
-``__getitem__(project_name)``
-    Returns a list of distributions for the given project name, ordered
-    from newest to oldest version.  (And highest to lowest format precedence
-    for distributions that contain the same version of the project.)  If there
-    are no distributions for the project, returns an empty list.
-
-``__iter__()``
-    Yield the unique project names of the distributions in this environment.
-    The yielded names are always in lower case.
-
-``add(dist)``
-    Add `dist` to the environment if it matches the platform and python version
-    specified at creation time, and only if the distribution hasn't already
-    been added. (i.e., adding the same distribution more than once is a no-op.)
-
-``remove(dist)``
-    Remove `dist` from the environment.
-
-``can_add(dist)``
-    Is distribution `dist` acceptable for this environment?  If it's not
-    compatible with the ``platform`` and ``python`` version values specified
-    when the environment was created, a false value is returned.
-
-``__add__(dist_or_env)``  (``+`` operator)
-    Add a distribution or environment to an ``Environment`` instance, returning
-    a *new* environment object that contains all the distributions previously
-    contained by both.  The new environment will have a ``platform`` and
-    ``python`` of ``None``, meaning that it will not reject any distributions
-    from being added to it; it will simply accept whatever is added.  If you
-    want the added items to be filtered for platform and Python version, or
-    you want to add them to the *same* environment instance, you should use
-    in-place addition (``+=``) instead.
-
-``__iadd__(dist_or_env)``  (``+=`` operator)
-    Add a distribution or environment to an ``Environment`` instance
-    *in-place*, updating the existing instance and returning it.  The
-    ``platform`` and ``python`` filter attributes take effect, so distributions
-    in the source that do not have a suitable platform string or Python version
-    are silently ignored.
-
-``best_match(req, working_set, installer=None)``
-    Find distribution best matching `req` and usable on `working_set`
-
-    This calls the ``find(req)`` method of the `working_set` to see if a
-    suitable distribution is already active.  (This may raise
-    ``VersionConflict`` if an unsuitable version of the project is already
-    active in the specified `working_set`.)  If a suitable distribution isn't
-    active, this method returns the newest distribution in the environment
-    that meets the ``Requirement`` in `req`.  If no suitable distribution is
-    found, and `installer` is supplied, then the result of calling
-    the environment's ``obtain(req, installer)`` method will be returned.
-
-``obtain(requirement, installer=None)``
-    Obtain a distro that matches requirement (e.g. via download).  In the
-    base ``Environment`` class, this routine just returns
-    ``installer(requirement)``, unless `installer` is None, in which case
-    None is returned instead.  This method is a hook that allows subclasses
-    to attempt other ways of obtaining a distribution before falling back
-    to the `installer` argument.
-
-``scan(search_path=None)``
-    Scan `search_path` for distributions usable on `platform`
-
-    Any distributions found are added to the environment.  `search_path` should
-    be a sequence of strings such as might be used on ``sys.path``.  If not
-    supplied, ``sys.path`` is used.  Only distributions conforming to
-    the platform/python version defined at initialization are added.  This
-    method is a shortcut for using the ``find_distributions()`` function to
-    find the distributions from each item in `search_path`, and then calling
-    ``add()`` to add each one to the environment.
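-
-The short usage sketch mentioned above combines the constructor with
-``best_match()``; the plugin directory and project name are hypothetical::
-
-    from pkg_resources import Environment, Requirement, working_set
-
-    # snapshot the distributions found in a plugin directory
-    env = Environment(['/opt/myapp/plugins'])
-
-    req = Requirement.parse('SomePlugin>=1.0')
-    dist = env.best_match(req, working_set)
-    if dist is not None:
-        working_set.add(dist)   # activate the chosen distribution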
-
-
-``Requirement`` Objects
-=======================
-
-``Requirement`` objects express what versions of a project are suitable for
-some purpose.  These objects (or their string form) are used by various
-``pkg_resources`` APIs in order to find distributions that a script or
-distribution needs.
-
-
-Requirements Parsing
---------------------
-
-``parse_requirements(s)``
-    Yield ``Requirement`` objects for a string or iterable of lines.  Each
-    requirement must start on a new line.  See below for syntax.
-
-``Requirement.parse(s)``
-    Create a ``Requirement`` object from a string or iterable of lines.  A
-    ``ValueError`` is raised if the string or lines do not contain a valid
-    requirement specifier, or if they contain more than one specifier.  (To
-    parse multiple specifiers from a string or iterable of strings, use
-    ``parse_requirements()`` instead.)
-
-    The syntax of a requirement specifier can be defined in EBNF as follows::
-
-        requirement  ::= project_name versionspec? extras?
-        versionspec  ::= comparison version (',' comparison version)*
-        comparison   ::= '<' | '<=' | '!=' | '==' | '>=' | '>'
-        extras       ::= '[' extralist? ']'
-        extralist    ::= identifier (',' identifier)*
-        project_name ::= identifier
-        identifier   ::= [-A-Za-z0-9_]+
-        version      ::= [-A-Za-z0-9_.]+
-
-    Tokens can be separated by whitespace, and a requirement can be continued
-    over multiple lines using a backslash (``\\``).  Line-end comments (using
-    ``#``) are also allowed.
-
-    Some examples of valid requirement specifiers::
-
-        FooProject >= 1.2
-        Fizzy [foo, bar]
-        PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1
-        SomethingWhoseVersionIDontCareAbout
-
-    The project name is the only required portion of a requirement string, and
-    if it's the only thing supplied, the requirement will accept any version
-    of that project.
-
-    The "extras" in a requirement are used to request optional features of a
-    project, that may require additional project distributions in order to
-    function.  For example, if the hypothetical "Report-O-Rama" project offered
-    optional PDF support, it might require an additional library in order to
-    provide that support.  Thus, a project needing Report-O-Rama's PDF features
-    could use a requirement of ``Report-O-Rama[PDF]`` to request installation
-    or activation of both Report-O-Rama and any libraries it needs in order to
-    provide PDF support.  For example, you could use::
-
-        easy_install.py Report-O-Rama[PDF]
-
-    to install the necessary packages using the EasyInstall program, or call
-    ``pkg_resources.require('Report-O-Rama[PDF]')`` to add the necessary
-    distributions to sys.path at runtime.
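-
-A minimal parsing sketch (the project names are hypothetical)::
-
-    from pkg_resources import Requirement, parse_requirements
-
-    req = Requirement.parse('FooProject>=1.2,<2.0')
-    print(req.project_name)   # 'FooProject'
-    print(req.specs)          # [('>=', '1.2'), ('<', '2.0')]
-
-    # parse_requirements() accepts several specifiers, one per line
-    for r in parse_requirements("Fizzy[foo,bar]\nPickyThing<1.6"):
-        print(str(r))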
-
-
-``Requirement`` Methods and Attributes
---------------------------------------
-
-``__contains__(dist_or_version)``
-    Return true if `dist_or_version` fits the criteria for this requirement.
-    If `dist_or_version` is a ``Distribution`` object, its project name must
-    match the requirement's project name, and its version must meet the
-    requirement's version criteria.  If `dist_or_version` is a string, it is
-    parsed using the ``parse_version()`` utility function.  Otherwise, it is
-    assumed to be an already-parsed version.
-
-    The ``Requirement`` object's version specifiers (``.specs``) are internally
-    sorted into ascending version order, and used to establish what ranges of
-    versions are acceptable.  Adjacent redundant conditions are effectively
-    consolidated (e.g. ``">1, >2"`` produces the same results as ``">1"``, and
-    ``"<2,<3"`` produces the same results as``"<3"``). ``"!="`` versions are
-    excised from the ranges they fall within.  The version being tested for
-    acceptability is then checked for membership in the resulting ranges.
-    (Note that providing conflicting conditions for the same version (e.g.
-    ``"<2,>=2"`` or ``"==2,!=2"``) is meaningless and may therefore produce
-    bizarre results when compared with actual version number(s).)
-
-``__eq__(other_requirement)``
-    A requirement compares equal to another requirement if they have
-    case-insensitively equal project names, version specifiers, and "extras".
-    (The order that extras and version specifiers are in is also ignored.)
-    Equal requirements also have equal hashes, so that requirements can be
-    used in sets or as dictionary keys.
-
-``__str__()``
-    The string form of a ``Requirement`` is a string that, if passed to
-    ``Requirement.parse()``, would return an equal ``Requirement`` object.
-
-``project_name``
-    The name of the required project
-
-``key``
-    An all-lowercase version of the ``project_name``, useful for comparison
-    or indexing.
-
-``extras``
-    A tuple of names of "extras" that this requirement calls for.  (These will
-    be all-lowercase and normalized using the ``safe_extra()`` parsing utility
-    function, so they may not exactly equal the extras the requirement was
-    created with.)
-
-``specs``
-    A list of ``(op,version)`` tuples, sorted in ascending parsed-version
-    order.  The `op` in each tuple is a comparison operator, represented as
-    a string.  The `version` is the (unparsed) version number.  The relative
-    order of tuples containing the same version numbers is undefined, since
-    having more than one operator for a given version is either redundant or
-    self-contradictory.
-
-
-Entry Points
-============
-
-Entry points are a simple way for distributions to "advertise" Python objects
-(such as functions or classes) for use by other distributions.  Extensible
-applications and frameworks can search for entry points with a particular name
-or group, either from a specific distribution or from all active distributions
-on sys.path, and then inspect or load the advertised objects at will.
-
-Entry points belong to "groups" which are named with a dotted name similar to
-a Python package or module name.  For example, the ``setuptools`` package uses
-an entry point named ``distutils.commands`` in order to find commands defined
-by distutils extensions.  ``setuptools`` treats the names of entry points
-defined in that group as the acceptable commands for a setup script.
-
-In a similar way, other packages can define their own entry point groups,
-either using dynamic names within the group (like ``distutils.commands``), or
-possibly using predefined names within the group.  For example, a blogging
-framework that offers various pre- or post-publishing hooks might define an
-entry point group and look for entry points named "pre_process" and
-"post_process" within that group.
-
-To advertise an entry point, a project needs to use ``setuptools`` and provide
-an ``entry_points`` argument to ``setup()`` in its setup script, so that the
-entry points will be included in the distribution's metadata.  For more
-details, see the ``setuptools`` documentation.  (XXX link here to setuptools)
-
-Each project distribution can advertise at most one entry point of a given
-name within the same entry point group.  For example, a distutils extension
-could advertise two different ``distutils.commands`` entry points, as long as
-they had different names.  However, there is nothing that prevents *different*
-projects from advertising entry points of the same name in the same group.  In
-some cases, this is a desirable thing, since the application or framework that
-uses the entry points may be calling them as hooks, or in some other way
-combining them.  It is up to the application or framework to decide what to do
-if multiple distributions advertise an entry point; some possibilities include
-using both entry points, displaying an error message, using the first one found
-in sys.path order, etc.
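-
-As an illustration only (the project, group, and callable names below are
-hypothetical), a project might advertise a publishing hook for such a
-framework from its setup script like this::
-
-    from setuptools import setup
-
-    setup(
-        name="Blogtool-PDF-Plugin",
-        version="1.0",
-        py_modules=["blogtool_pdf"],
-        entry_points={
-            # group name: list of "name = module:attribute" strings
-            "blogtool.publishing_hooks": [
-                "post_process = blogtool_pdf:render_pdf",
-            ],
-        },
-    )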
-
-
-Convenience API
----------------
-
-In the following functions, the `dist` argument can be a ``Distribution``
-instance, a ``Requirement`` instance, or a string specifying a requirement
-(i.e. project name, version, etc.).  If the argument is a string or
-``Requirement``, the specified distribution is located (and added to sys.path
-if not already present).  An error will be raised if a matching distribution is
-not available.
-
-The `group` argument should be a string containing a dotted identifier,
-identifying an entry point group.  If you are defining an entry point group,
-you should include some portion of your package's name in the group name so as
-to avoid collision with other packages' entry point groups.
-
-``load_entry_point(dist, group, name)``
-    Load the named entry point from the specified distribution, or raise
-    ``ImportError``.
-
-``get_entry_info(dist, group, name)``
-    Return an ``EntryPoint`` object for the given `group` and `name` from
-    the specified distribution.  Returns ``None`` if the distribution has not
-    advertised a matching entry point.
-
-``get_entry_map(dist, group=None)``
-    Return the distribution's entry point map for `group`, or the full entry
-    map for the distribution.  This function always returns a dictionary,
-    even if the distribution advertises no entry points.  If `group` is given,
-    the dictionary maps entry point names to the corresponding ``EntryPoint``
-    object.  If `group` is None, the dictionary maps group names to
-    dictionaries that then map entry point names to the corresponding
-    ``EntryPoint`` instance in that group.
-
-``iter_entry_points(group, name=None)``
-    Yield entry point objects from `group` matching `name`.
-
-    If `name` is None, yields all entry points in `group` from all
-    distributions in the working set on sys.path, otherwise only ones matching
-    both `group` and `name` are yielded.  Entry points are yielded from
-    the active distributions in the order that the distributions appear on
-    sys.path.  (Within entry points for a particular distribution, however,
-    there is no particular ordering.)
-
-    (This API is actually a method of the global ``working_set`` object; see
-    the section above on `Basic WorkingSet Methods`_ for more information.)
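-
-For example, an application could discover and invoke every plugin advertised
-in a particular group with a sketch like this (the ``myapp.plugins`` group
-name is hypothetical, and the plugins are assumed to be callables)::
-
-    import pkg_resources
-
-    # Entry points are yielded from active distributions in sys.path order
-    for entry_point in pkg_resources.iter_entry_points("myapp.plugins"):
-        plugin = entry_point.load()    # may raise ImportError
-        plugin()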
-
-
-Creating and Parsing
---------------------
-
-``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)``
-    Create an ``EntryPoint`` instance.  `name` is the entry point name.  The
-    `module_name` is the (dotted) name of the module containing the advertised
-    object.  `attrs` is an optional tuple of names to look up from the
-    module to obtain the advertised object.  For example, an `attrs` of
-    ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the
-    advertised object could be obtained by the following code::
-
-        import baz
-        advertised_object = baz.foo.bar
-
-    The `extras` are an optional tuple of "extra feature" names that the
-    distribution needs in order to provide this entry point.  When the
-    entry point is loaded, these extra features are looked up in the `dist`
-    argument to find out what other distributions may need to be activated
-    on sys.path; see the ``load()`` method for more details.  The `extras`
-    argument is only meaningful if `dist` is specified.  `dist` must be
-    a ``Distribution`` instance.
-
-``EntryPoint.parse(src, dist=None)`` (classmethod)
-    Parse a single entry point from string `src`
-
-    Entry point syntax follows the form::
-
-        name = some.module:some.attr [extra1,extra2]
-
-    The entry name and module name are required, but the ``:attrs`` and
-    ``[extras]`` parts are optional, as is the whitespace shown between
-    some of the items.  The `dist` argument is passed through to the
-    ``EntryPoint()`` constructor, along with the other values parsed from
-    `src`.
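-
-    For instance, a sketch of parsing a line in this syntax (the names shown
-    are purely illustrative)::
-
-        from pkg_resources import EntryPoint
-
-        ep = EntryPoint.parse("pdf = report_o_rama.output:PDFWriter [PDF]")
-        print(ep.name)           # 'pdf'
-        print(ep.module_name)    # 'report_o_rama.output'
-        print(ep.attrs)          # ('PDFWriter',)
-        print(ep.extras)         # the requested extras, as a tuple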
-
-``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod)
-    Parse `lines` (a string or sequence of lines) to create a dictionary
-    mapping entry point names to ``EntryPoint`` objects.  ``ValueError`` is
-    raised if entry point names are duplicated, if `group` is not a valid
-    entry point group name, or if there are any syntax errors.  (Note: the
-    `group` parameter is used only for validation and to create more
-    informative error messages.)  If `dist` is provided, it will be used to
-    set the ``dist`` attribute of the created ``EntryPoint`` objects.
-
-``EntryPoint.parse_map(data, dist=None)`` (classmethod)
-    Parse `data` into a dictionary mapping group names to dictionaries mapping
-    entry point names to ``EntryPoint`` objects.  If `data` is a dictionary,
-    then the keys are used as group names and the values are passed to
-    ``parse_group()`` as the `lines` argument.  If `data` is a string or
-    sequence of lines, it is first split into .ini-style sections (using
-    the ``split_sections()`` utility function) and the section names are used
-    as group names.  In either case, the `dist` argument is passed through to
-    ``parse_group()`` so that the entry points will be linked to the specified
-    distribution.
-
-
-``EntryPoint`` Objects
-----------------------
-
-For simple introspection, ``EntryPoint`` objects have attributes that
-correspond exactly to the constructor argument names: ``name``,
-``module_name``, ``attrs``, ``extras``, and ``dist`` are all available.  In
-addition, the following methods are provided:
-
-``load(require=True, env=None, installer=None)``
-    Load the entry point, returning the advertised Python object, or raise
-    ``ImportError`` if it cannot be obtained.  If `require` is a true value,
-    then ``require(env, installer)`` is called before attempting the import.
-
-``require(env=None, installer=None)``
-    Ensure that any "extras" needed by the entry point are available on
-    sys.path.  ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``,
-    but no ``dist``, or if the named extras are not defined by the
-    distribution.  If `env` is supplied, it must be an ``Environment``, and it
-    will be used to search for needed distributions if they are not already
-    present on sys.path.  If `installer` is supplied, it must be a callable
-    taking a ``Requirement`` instance and returning a matching importable
-    ``Distribution`` instance or None.
-
-``__str__()``
-    The string form of an ``EntryPoint`` is a string that could be passed to
-    ``EntryPoint.parse()`` to produce an equivalent ``EntryPoint``.
-
-
-``Distribution`` Objects
-========================
-
-``Distribution`` objects represent collections of Python code that may or may
-not be importable, and may or may not have metadata and resources associated
-with them.  Their metadata may include information such as what other projects
-the distribution depends on, what entry points the distribution advertises, and
-so on.
-
-
-Getting or Creating Distributions
----------------------------------
-
-Most commonly, you'll obtain ``Distribution`` objects from a ``WorkingSet`` or
-an ``Environment``.  (See the sections above on `WorkingSet Objects`_ and
-`Environment Objects`_, which are containers for active distributions and
-available distributions, respectively.)  You can also obtain ``Distribution``
-objects from one of these high-level APIs:
-
-``find_distributions(path_item, only=False)``
-    Yield distributions accessible via `path_item`.  If `only` is true, yield
-    only distributions whose ``location`` is equal to `path_item`.  In other
-    words, if `only` is true, this yields any distributions that would be
-    importable if `path_item` were on ``sys.path``.  If `only` is false, this
-    also yields distributions that are "in" or "under" `path_item`, but would
-    not be importable unless their locations were also added to ``sys.path``.
-
-``get_distribution(dist_spec)``
-    Return a ``Distribution`` object for a given ``Requirement`` or string.
-    If `dist_spec` is already a ``Distribution`` instance, it is returned.
-    If it is a ``Requirement`` object or a string that can be parsed into one,
-    it is used to locate and activate a matching distribution, which is then
-    returned.
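-
-    For example, either a plain project name or a full requirement string can
-    be used (assuming a project named "setuptools" is installed)::
-
-        import pkg_resources
-
-        dist = pkg_resources.get_distribution("setuptools>=0.6")
-        # 'dist' is now an activated Distribution instance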
-
-However, if you're creating specialized tools for working with distributions,
-or creating a new distribution format, you may also need to create
-``Distribution`` objects directly, using one of the three constructors below.
-
-These constructors all take an optional `metadata` argument, which is used to
-access any resources or metadata associated with the distribution.  `metadata`
-must be an object that implements the ``IResourceProvider`` interface, or None.
-If it is None, an ``EmptyProvider`` is used instead.  ``Distribution`` objects
-implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by
-delegating them to the `metadata` object.
-
-``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod)
-    Create a distribution for `location`, which must be a string such as a
-    URL, filename, or other string that might be used on ``sys.path``.
-    `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
-    If `basename` ends with ``.egg``, then the project's name, version, python
-    version and platform are extracted from the filename and used to set those
-    properties of the created distribution.  Any additional keyword arguments
-    are forwarded to the ``Distribution()`` constructor.
-
-``Distribution.from_filename(filename, metadata=None, **kw)`` (classmethod)
-    Create a distribution by parsing a local filename.  This is a shorter way
-    of saying  ``Distribution.from_location(normalize_path(filename),
-    os.path.basename(filename), metadata)``.  In other words, it creates a
-    distribution whose location is the normalized form of the filename, parsing
-    name and version information from the base portion of the filename.  Any
-    additional keyword arguments are forwarded to the ``Distribution()``
-    constructor.
-
-``Distribution(location,metadata,project_name,version,py_version,platform,precedence)``
-    Create a distribution by setting its properties.  All arguments are
-    optional and default to None, except for `py_version` (which defaults to
-    the current Python version) and `precedence` (which defaults to
-    ``EGG_DIST``; for more details see ``precedence`` under `Distribution
-    Attributes`_ below).  Note that it's usually easier to use the
-    ``from_filename()`` or ``from_location()`` constructors than to specify
-    all these arguments individually.
-
-
-``Distribution`` Attributes
----------------------------
-
-location
-    A string indicating the distribution's location.  For an importable
-    distribution, this is the string that would be added to ``sys.path`` to
-    make it actively importable.  For non-importable distributions, this is
-    simply a filename, URL, or other way of locating the distribution.
-
-project_name
-    A string, naming the project that this distribution is for.  Project names
-    are defined by a project's setup script, and they are used to identify
-    projects on PyPI.  When a ``Distribution`` is constructed, the
-    `project_name` argument is passed through the ``safe_name()`` utility
-    function to filter out any unacceptable characters.
-
-key
-    ``dist.key`` is short for ``dist.project_name.lower()``.  It's used for
-    case-insensitive comparison and indexing of distributions by project name.
-
-extras
-    A list of strings, giving the names of extra features defined by the
-    project's dependency list (the ``extras_require`` argument specified in
-    the project's setup script).
-
-version
-    A string denoting what release of the project this distribution contains.
-    When a ``Distribution`` is constructed, the `version` argument is passed
-    through the ``safe_version()`` utility function to filter out any
-    unacceptable characters.  If no `version` is specified at construction
-    time, then attempting to access this attribute later will cause the
-    ``Distribution`` to try to discover its version by reading its ``PKG-INFO``
-    metadata file.  If ``PKG-INFO`` is unavailable or can't be parsed,
-    ``ValueError`` is raised.
-
-parsed_version
-    The ``parsed_version`` is a tuple representing a "parsed" form of the
-    distribution's ``version``.  ``dist.parsed_version`` is a shortcut for
-    calling ``parse_version(dist.version)``.  It is used to compare or sort
-    distributions by version.  (See the `Parsing Utilities`_ section below for
-    more information on the ``parse_version()`` function.)  Note that accessing
-    ``parsed_version`` may result in a ``ValueError`` if the ``Distribution``
-    was constructed without a `version` and without `metadata` capable of
-    supplying the missing version info.
-
-py_version
-    The major/minor Python version the distribution supports, as a string.
-    For example, "2.3" or "2.4".  The default is the current version of Python.
-
-platform
-    A string representing the platform the distribution is intended for, or
-    ``None`` if the distribution is "pure Python" and therefore cross-platform.
-    See `Platform Utilities`_ below for more information on platform strings.
-
-precedence
-    A distribution's ``precedence`` is used to determine the relative order of
-    two distributions that have the same ``project_name`` and
-    ``parsed_version``.  The default precedence is ``pkg_resources.EGG_DIST``,
-    which is the highest (i.e. most preferred) precedence.  The full list
-    of predefined precedences, from most preferred to least preferred, is:
-    ``EGG_DIST``, ``BINARY_DIST``, ``SOURCE_DIST``, ``CHECKOUT_DIST``, and
-    ``DEVELOP_DIST``.  Normally, precedences other than ``EGG_DIST`` are used
-    only by the ``setuptools.package_index`` module, when sorting distributions
-    found in a package index to determine their suitability for installation.
-    "System" and "Development" eggs (i.e., ones that use the ``.egg-info``
-    format), however, are automatically given a precedence of ``DEVELOP_DIST``.
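-
-Putting a few of these attributes together, a small sketch that inspects an
-installed distribution (assuming one named "setuptools" is available) might
-look like this::
-
-    import pkg_resources
-
-    dist = pkg_resources.get_distribution("setuptools")
-    print(dist.project_name)      # name as given by the project's setup script
-    print(dist.key)               # lowercase form, used for indexing
-    print(dist.version)           # release string from the metadata
-    print(dist.parsed_version)    # parsed tuple, usable for sorting
-    print(dist.location)          # the sys.path entry the project came from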
-
-
-
-``Distribution`` Methods
-------------------------
-
-``activate(path=None)``
-    Ensure distribution is importable on `path`.  If `path` is None,
-    ``sys.path`` is used instead.  This ensures that the distribution's
-    ``location`` is in the `path` list, and it also performs any necessary
-    namespace package fixups or declarations.  (That is, if the distribution
-    contains namespace packages, this method ensures that they are declared,
-    and that the distribution's contents for those namespace packages are
-    merged with the contents provided by any other active distributions.  See
-    the section above on `Namespace Package Support`_ for more information.)
-
-    ``pkg_resources`` adds a notification callback to the global ``working_set``
-    that ensures this method is called whenever a distribution is added to it.
-    Therefore, you should not normally need to explicitly call this method.
-    (Note that this means that namespace packages on ``sys.path`` are always
-    imported as soon as ``pkg_resources`` is, which is another reason why
-    namespace packages should not contain any code or import statements.)
-
-``as_requirement()``
-    Return a ``Requirement`` instance that matches this distribution's project
-    name and version.
-
-``requires(extras=())``
-    List the ``Requirement`` objects that specify this distribution's
-    dependencies.  If `extras` is specified, it should be a sequence of names
-    of "extras" defined by the distribution, and the list returned will then
-    include any dependencies needed to support the named "extras".
-
-``clone(**kw)``
-    Create a copy of the distribution.  Any supplied keyword arguments override
-    the corresponding argument to the ``Distribution()`` constructor, allowing
-    you to change some of the copied distribution's attributes.
-
-``egg_name()``
-    Return what this distribution's standard filename should be, not including
-    the ".egg" extension.  For example, a distribution for project "Foo"
-    version 1.2 that runs on Python 2.3 for Windows would have an ``egg_name()``
-    of ``Foo-1.2-py2.3-win32``.  Any dashes in the name or version are
-    converted to underscores.  (``Distribution.from_location()`` will convert
-    them back when parsing a ".egg" file name.)
-
-``__cmp__(other)``, ``__hash__()``
-    Distribution objects are hashed and compared on the basis of their parsed
-    version and precedence, followed by their key (lowercase project name),
-    location, Python version, and platform.
-
-The following methods are used to access ``EntryPoint`` objects advertised
-by the distribution.  See the section above on `Entry Points`_ for more
-detailed information about these operations:
-
-``get_entry_info(group, name)``
-    Return the ``EntryPoint`` object for `group` and `name`, or None if no
-    such point is advertised by this distribution.
-
-``get_entry_map(group=None)``
-    Return the entry point map for `group`.  If `group` is None, return
-    a dictionary mapping group names to entry point maps for all groups.
-    (An entry point map is a dictionary of entry point names to ``EntryPoint``
-    objects.)
-
-``load_entry_point(group, name)``
-    Short for ``get_entry_info(group, name).load()``.  Returns the object
-    advertised by the named entry point, or raises ``ImportError`` if
-    the entry point isn't advertised by this distribution, or there is some
-    other import problem.
-
-In addition to the above methods, ``Distribution`` objects also implement all
-of the `IResourceProvider`_ and `IMetadataProvider Methods`_ (which are
-documented in later sections):
-
-* ``has_metadata(name)``
-* ``metadata_isdir(name)``
-* ``metadata_listdir(name)``
-* ``get_metadata(name)``
-* ``get_metadata_lines(name)``
-* ``run_script(script_name, namespace)``
-* ``get_resource_filename(manager, resource_name)``
-* ``get_resource_stream(manager, resource_name)``
-* ``get_resource_string(manager, resource_name)``
-* ``has_resource(resource_name)``
-* ``resource_isdir(resource_name)``
-* ``resource_listdir(resource_name)``
-
-If the distribution was created with a `metadata` argument, these resource and
-metadata access methods are all delegated to that `metadata` provider.
-Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution
-will appear to have no resources or metadata.  This delegation approach is used
-so that supporting custom importers or new distribution formats can be done
-simply by creating an appropriate `IResourceProvider`_ implementation; see the
-section below on `Supporting Custom Importers`_ for more details.
-
-
-``ResourceManager`` API
-=======================
-
-The ``ResourceManager`` class provides uniform access to package resources,
-whether those resources exist as files and directories or are compressed in
-an archive of some kind.
-
-Normally, you do not need to create or explicitly manage ``ResourceManager``
-instances, as the ``pkg_resources`` module creates a global instance for you,
-and makes most of its methods available as top-level names in the
-``pkg_resources`` module namespace.  So, for example, this code actually
-calls the ``resource_string()`` method of the global ``ResourceManager``::
-
-    import pkg_resources
-    my_data = pkg_resources.resource_string(__name__, "foo.dat")
-
-Thus, you can use the APIs below without needing an explicit
-``ResourceManager`` instance; just import and use them as needed.
-
-
-Basic Resource Access
----------------------
-
-In the following methods, the `package_or_requirement` argument may be either
-a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
-If it is a package or module name, the named module or package must be
-importable (i.e., be in a distribution or directory on ``sys.path``), and the
-`resource_name` argument is interpreted relative to the named package.  (Note
-that if a module name is used, then the resource name is relative to the
-    package immediately containing the named module.  Also, you should not use
-a namespace package name, because a namespace package can be spread across
-multiple distributions, and is therefore ambiguous as to which distribution
-should be searched for the resource.)
-
-If it is a ``Requirement``, then the requirement is automatically resolved
-(searching the current ``Environment`` if necessary) and a matching
-distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
-already present.  (Unless the ``Requirement`` can't be satisfied, in which
-case an exception is raised.)  The `resource_name` argument is then interpreted
-relative to the root of the identified distribution; i.e. its first path
-segment will be treated as a peer of the top-level modules or packages in the
-distribution.
-
-Note that resource names must be ``/``-separated paths and cannot be absolute
-(i.e. no leading ``/``) or contain relative names like ``".."``.  Do *not* use
-``os.path`` routines to manipulate resource paths, as they are *not* filesystem
-paths.
-
-``resource_exists(package_or_requirement, resource_name)``
-    Does the named resource exist?  Return ``True`` or ``False`` accordingly.
-
-``resource_stream(package_or_requirement, resource_name)``
-    Return a readable file-like object for the specified resource; it may be
-    an actual file, a ``StringIO``, or some similar object.  The stream is
-    in "binary mode", in the sense that whatever bytes are in the resource
-    will be read as-is.
-
-``resource_string(package_or_requirement, resource_name)``
-    Return the specified resource as a string.  The resource is read in
-    binary fashion, such that the returned string contains exactly the bytes
-    that are stored in the resource.
-
-``resource_isdir(package_or_requirement, resource_name)``
-    Is the named resource a directory?  Return ``True`` or ``False``
-    accordingly.
-
-``resource_listdir(package_or_requirement, resource_name)``
-    List the contents of the named resource directory, just like ``os.listdir``
-    except that it works even if the resource is in a zipfile.
-
-Note that only ``resource_exists()`` and ``resource_isdir()`` are insensitive
-as to the resource type.  You cannot use ``resource_listdir()`` on a file
-resource, and you can't use ``resource_string()`` or ``resource_stream()`` on
-directory resources.  Using an inappropriate method for the resource type may
-result in an exception or undefined behavior, depending on the platform and
-distribution format involved.
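-
-For example, a package could read a data file shipped alongside its own
-modules like this (the ``mypkg`` package and resource names are only
-illustrative)::
-
-    import pkg_resources
-
-    if pkg_resources.resource_exists("mypkg", "templates/default.html"):
-        data = pkg_resources.resource_string("mypkg", "templates/default.html")
-
-    # Directory listings work even if "mypkg" is installed as a zipped egg
-    for name in pkg_resources.resource_listdir("mypkg", "templates"):
-        print(name)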
-
-
-Resource Extraction
--------------------
-
-``resource_filename(package_or_requirement, resource_name)``
-    Sometimes, it is not sufficient to access a resource in string or stream
-    form, and a true filesystem filename is needed.  In such cases, you can
-    use this method (or module-level function) to obtain a filename for a
-    resource.  If the resource is in an archive distribution (such as a zipped
-    egg), it will be extracted to a cache directory, and the filename within
-    the cache will be returned.  If the named resource is a directory, then
-    all resources within that directory (including subdirectories) are also
-    extracted.  If the named resource is a C extension or "eager resource"
-    (see the ``setuptools`` documentation for details), then all C extensions
-    and eager resources are extracted at the same time.
-
-    Archived resources are extracted to a cache location that can be managed by
-    the following two methods:
-
-``set_extraction_path(path)``
-    Set the base path where resources will be extracted to, if needed.
-
-    If you do not call this routine before any extractions take place, the
-    path defaults to the return value of ``get_default_cache()``, which is
-    based on the ``PYTHON_EGG_CACHE`` environment variable, with various
-    platform-specific fallbacks.  See that routine's documentation for more
-    details.
-
-    Resources are extracted to subdirectories of this path based upon
-    information given by the resource provider.  You may set this to a
-    temporary directory, but then you must call ``cleanup_resources()`` to
-    delete the extracted files when done.  There is no guarantee that
-    ``cleanup_resources()`` will be able to remove all extracted files.  (On
-    Windows, for example, you can't unlink .pyd or .dll files that are still
-    in use.)
-
-    Note that you may not change the extraction path for a given resource
-    manager once resources have been extracted, unless you first call
-    ``cleanup_resources()``.
-
-``cleanup_resources(force=False)``
-    Delete all extracted resource files and directories, returning a list
-    of the file and directory names that could not be successfully removed.
-    This function does not have any concurrency protection, so it should
-    generally only be called when the extraction path is a temporary
-    directory exclusive to a single process.  This method is not
-    automatically called; you must call it explicitly or register it as an
-    ``atexit`` function if you wish to ensure cleanup of a temporary
-    directory used for extractions.
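-
-A sketch of the temporary-directory pattern described above might look like
-this (the ``mypkg`` package and resource name are illustrative)::
-
-    import atexit
-    import tempfile
-    import pkg_resources
-
-    # Extract into a private temporary directory instead of the default
-    # cache, and make a best-effort attempt to clean it up at exit.
-    pkg_resources.set_extraction_path(tempfile.mkdtemp())
-    atexit.register(pkg_resources.cleanup_resources)
-
-    filename = pkg_resources.resource_filename("mypkg", "data/model.bin")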
-
-
-"Provider" Interface
---------------------
-
-If you are implementing an ``IResourceProvider`` and/or ``IMetadataProvider``
-for a new distribution archive format, you may need to use the following
-``IResourceManager`` methods to co-ordinate extraction of resources to the
-filesystem.  If you're not implementing an archive format, however, you have
-no need to use these methods.  Unlike the other methods listed above, they are
-*not* available as top-level functions tied to the global ``ResourceManager``;
-you must therefore have an explicit ``ResourceManager`` instance to use them.
-
-``get_cache_path(archive_name, names=())``
-    Return absolute location in cache for `archive_name` and `names`
-
-    The parent directory of the resulting path will be created if it does
-    not already exist.  `archive_name` should be the base filename of the
-    enclosing egg (which may not be the name of the enclosing zipfile!),
-    including its ".egg" extension.  `names`, if provided, should be a
-    sequence of path name parts "under" the egg's extraction location.
-
-    This method should only be called by resource providers that need to
-    obtain an extraction location, and only for names they intend to
-    extract, as it tracks the generated names for possible cleanup later.
-
-``extraction_error()``
-    Raise an ``ExtractionError`` describing the active exception as interfering
-    with the extraction process.  You should call this if you encounter any
-    OS errors extracting the file to the cache path; it will format the
-    operating system exception for you, and add other information to the
-    ``ExtractionError`` instance that may be needed by programs that want to
-    wrap or handle extraction errors themselves.
-
-``postprocess(tempname, filename)``
-    Perform any platform-specific postprocessing of `tempname`.
-    Resource providers should call this method ONLY after successfully
-    extracting a compressed resource.  They must NOT call it on resources
-    that are already in the filesystem.
-
-    `tempname` is the current (temporary) name of the file, and `filename`
-    is the name it will be renamed to by the caller after this routine
-    returns.
-
-
-Metadata API
-============
-
-The metadata API is used to access metadata resources bundled in a pluggable
-distribution.  Metadata resources are virtual files or directories containing
-information about the distribution, such as might be used by an extensible
-application or framework to connect "plugins".  Like other kinds of resources,
-metadata resource names are ``/``-separated and should not contain ``..`` or
-begin with a ``/``.  You should not use ``os.path`` routines to manipulate
-resource paths.
-
-The metadata API is provided by objects implementing the ``IMetadataProvider``
-or ``IResourceProvider`` interfaces.  ``Distribution`` objects implement this
-interface, as do objects returned by the ``get_provider()`` function:
-
-``get_provider(package_or_requirement)``
-    If a package name is supplied, return an ``IResourceProvider`` for the
-    package.  If a ``Requirement`` is supplied, resolve it by returning a
-    ``Distribution`` from the current working set (searching the current
-    ``Environment`` if necessary and adding the newly found ``Distribution``
-    to the working set).  If the named package can't be imported, or the
-    ``Requirement`` can't be satisfied, an exception is raised.
-
-    NOTE: if you use a package name rather than a ``Requirement``, the object
-    you get back may not be a pluggable distribution, depending on the method
-    by which the package was installed.  In particular, "development" packages
-    and "single-version externally-managed" packages do not have any way to
-    map from a package name to the corresponding project's metadata.  Do not
-    write code that passes a package name to ``get_provider()`` and then tries
-    to retrieve project metadata from the returned object.  It may appear to
-    work when the named package is in an ``.egg`` file or directory, but
-    it will fail in other installation scenarios.  If you want project
-    metadata, you need to ask for a *project*, not a package.
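-
-    For example, asking for a *project* by ``Requirement`` returns an object
-    whose metadata methods can be relied upon (assuming a project named
-    "setuptools" is installed)::
-
-        import pkg_resources
-
-        provider = pkg_resources.get_provider(
-            pkg_resources.Requirement.parse("setuptools")
-        )
-        if provider.has_metadata("PKG-INFO"):
-            pkg_info = provider.get_metadata("PKG-INFO")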
-
-
-``IMetadataProvider`` Methods
------------------------------
-
-The methods provided by objects (such as ``Distribution`` instances) that
-implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are:
-
-``has_metadata(name)``
-    Does the named metadata resource exist?
-
-``metadata_isdir(name)``
-    Is the named metadata resource a directory?
-
-``metadata_listdir(name)``
-    List of metadata names in the directory (like ``os.listdir()``)
-
-``get_metadata(name)``
-    Return the named metadata resource as a string.  The data is read in binary
-    mode; i.e., the exact bytes of the resource file are returned.
-
-``get_metadata_lines(name)``
-    Yield named metadata resource as list of non-blank non-comment lines.  This
-    is short for calling ``yield_lines(provider.get_metadata(name))``.  See the
-    section on `yield_lines()`_ below for more information on the syntax it
-    recognizes.
-
-``run_script(script_name, namespace)``
-    Execute the named script in the supplied namespace dictionary.  Raises
-    ``ResolutionError`` if there is no script by that name in the ``scripts``
-    metadata directory.  `namespace` should be a Python dictionary, usually
-    a module dictionary if the script is being run as a module.
-
-
-Exceptions
-==========
-
-``pkg_resources`` provides a simple exception hierarchy for problems that may
-occur when processing requests to locate and activate packages::
-
-    ResolutionError
-        DistributionNotFound
-        VersionConflict
-        UnknownExtra
-
-    ExtractionError
-
-``ResolutionError``
-    This class is used as a base class for the other three exceptions, so that
-    you can catch all of them with a single "except" clause.  It is also raised
-    directly for miscellaneous requirement-resolution problems like trying to
-    run a script that doesn't exist in the distribution it was requested from.
-
-``DistributionNotFound``
-    A distribution needed to fulfill a requirement could not be found.
-
-``VersionConflict``
-    The requested version of a project conflicts with an already-activated
-    version of the same project.
-
-``UnknownExtra``
-    One of the "extras" requested was not recognized by the distribution it
-    was requested from.
-
-``ExtractionError``
-    A problem occurred extracting a resource to the Python Egg cache.  The
-    following attributes are available on instances of this exception:
-
-    manager
-        The resource manager that raised this exception
-
-    cache_path
-        The base directory for resource extraction
-
-    original_error
-        The exception instance that caused extraction to fail
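-
-For example, a program that activates its requirements at startup can report
-resolution problems cleanly (the requirement string below is illustrative)::
-
-    import pkg_resources
-
-    try:
-        pkg_resources.require("Report-O-Rama[PDF]>=1.0")
-    except pkg_resources.DistributionNotFound:
-        print("a required distribution is not installed")
-    except pkg_resources.VersionConflict:
-        print("an incompatible version is already active")
-    except pkg_resources.ResolutionError:
-        print("some other resolution problem occurred")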
-
-
-Supporting Custom Importers
-===========================
-
-By default, ``pkg_resources`` supports normal filesystem imports, and
-``zipimport`` importers.  If you wish to use the ``pkg_resources`` features
-with other (PEP 302-compatible) importers or module loaders, you may need to
-register various handlers and support functions using these APIs:
-
-``register_finder(importer_type, distribution_finder)``
-    Register `distribution_finder` to find distributions in ``sys.path`` items.
-    `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path``
-    item handler), and `distribution_finder` is a callable that, when passed a
-    path item, the importer instance, and an `only` flag, yields
-    ``Distribution`` instances found under that path item.  (The `only` flag,
-    if true, means the finder should yield only ``Distribution`` objects whose
-    ``location`` is equal to the path item provided.)
-
-    See the source of the ``pkg_resources.find_on_path`` function for an
-    example finder function.
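-
-    A minimal sketch of registering a finder for a hypothetical importer type
-    (a real finder would inspect the path item and yield any ``Distribution``
-    objects it finds there)::
-
-        import pkg_resources
-
-        class MyArchiveImporter(object):
-            """Hypothetical PEP 302 importer for a custom archive format."""
-
-        def find_in_my_archives(importer, path_item, only=False):
-            # Inspect the archive behind `path_item` and yield Distribution
-            # objects for any project metadata found.  This stub finds none.
-            return iter(())
-
-        pkg_resources.register_finder(MyArchiveImporter, find_in_my_archives)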
-
-``register_loader_type(loader_type, provider_factory)``
-    Register `provider_factory` to make ``IResourceProvider`` objects for
-    `loader_type`.  `loader_type` is the type or class of a PEP 302
-    ``module.__loader__``, and `provider_factory` is a function that, when
-    passed a module object, returns an `IResourceProvider`_ for that module,
-    allowing it to be used with the `ResourceManager API`_.
-
-``register_namespace_handler(importer_type, namespace_handler)``
-    Register `namespace_handler` to declare namespace packages for the given
-    `importer_type`.  `importer_type` is the type or class of a PEP 302
-    "importer" (sys.path item handler), and `namespace_handler` is a callable
-    with a signature like this::
-
-        def namespace_handler(importer, path_entry, moduleName, module):
-            # return a path_entry to use for child packages
-
-    Namespace handlers are only called if the relevant importer object has
-    already agreed that it can handle the relevant path item.  The handler
-    should only return a subpath if the module ``__path__`` does not already
-    contain an equivalent subpath.  Otherwise, it should return None.
-
-    For an example namespace handler, see the source of the
-    ``pkg_resources.file_ns_handler`` function, which is used for both zipfile
-    importing and regular importing.
-
-
-IResourceProvider
------------------
-
-``IResourceProvider`` is an abstract class that documents what methods are
-required of objects returned by a `provider_factory` registered with
-``register_loader_type()``.  ``IResourceProvider`` is a subclass of
-``IMetadataProvider``, so objects that implement this interface must also
-implement all of the `IMetadataProvider Methods`_ as well as the methods
-shown here.  The `manager` argument to the methods below must be an object
-that supports the full `ResourceManager API`_ documented above.
-
-``get_resource_filename(manager, resource_name)``
-    Return a true filesystem path for `resource_name`, co-ordinating the
-    extraction with `manager`, if the resource must be unpacked to the
-    filesystem.
-
-``get_resource_stream(manager, resource_name)``
-    Return a readable file-like object for `resource_name`.
-
-``get_resource_string(manager, resource_name)``
-    Return a string containing the contents of `resource_name`.
-
-``has_resource(resource_name)``
-    Does the package contain the named resource?
-
-``resource_isdir(resource_name)``
-    Is the named resource a directory?  Return a false value if the resource
-    does not exist or is not a directory.
-
-``resource_listdir(resource_name)``
-    Return a list of the contents of the resource directory, ala
-    ``os.listdir()``.  Requesting the contents of a non-existent directory may
-    raise an exception.
-
-Note, by the way, that your provider classes need not (and should not) subclass
-``IResourceProvider`` or ``IMetadataProvider``!  These classes exist solely
-for documentation purposes and do not provide any useful implementation code.
-You may instead wish to subclass one of the `built-in resource providers`_.
-
-
-Built-in Resource Providers
----------------------------
-
-``pkg_resources`` includes several provider classes that are automatically used
-where appropriate.  Their inheritance tree looks like this::
-
-    NullProvider
-        EggProvider
-            DefaultProvider
-                PathMetadata
-            ZipProvider
-                EggMetadata
-        EmptyProvider
-            FileMetadata
-
-
-``NullProvider``
-    This provider class is just an abstract base that provides for common
-    provider behaviors (such as running scripts), given a definition for just
-    a few abstract methods.
-
-``EggProvider``
-    This provider class adds in some egg-specific features that are common
-    to zipped and unzipped eggs.
-
-``DefaultProvider``
-    This provider class is used for unpacked eggs and "plain old Python"
-    filesystem modules.
-
-``ZipProvider``
-    This provider class is used for all zipped modules, whether they are eggs
-    or not.
-
-``EmptyProvider``
-    This provider class always returns answers consistent with a provider that
-    has no metadata or resources.  ``Distribution`` objects created without
-    a ``metadata`` argument use an instance of this provider class instead.
-    Since all ``EmptyProvider`` instances are equivalent, there is no need
-    to have more than one instance.  ``pkg_resources`` therefore creates a
-    global instance of this class under the name ``empty_provider``, and you
-    may use it if you have need of an ``EmptyProvider`` instance.
-
-``PathMetadata(path, egg_info)``
-    Create an ``IResourceProvider`` for a filesystem-based distribution, where
-    `path` is the filesystem location of the importable modules, and `egg_info`
-    is the filesystem location of the distribution's metadata directory.
-    `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an
-    "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for
-    a "development egg".  However, other uses are possible for custom purposes.
-
-``EggMetadata(zipimporter)``
-    Create an ``IResourceProvider`` for a zipfile-based distribution.  The
-    `zipimporter` should be a ``zipimport.zipimporter`` instance, and may
-    represent a "basket" (a zipfile containing multiple ".egg" subdirectories)
-    a specific egg *within* a basket, or a zipfile egg (where the zipfile
-    itself is a ".egg").  It can also be a combination, such as a zipfile egg
-    that also contains other eggs.
-
-``FileMetadata(path_to_pkg_info)``
-    Create an ``IResourceProvider`` that provides exactly one metadata
-    resource: ``PKG-INFO``.  The supplied path should be a distutils PKG-INFO
-    file.  This is basically the same as an ``EmptyProvider``, except that
-    requests for ``PKG-INFO`` will be answered using the contents of the
-    designated file.  (This provider is used to wrap ``.egg-info`` files
-    installed by vendor-supplied system packages.)
-
-
-Utility Functions
-=================
-
-In addition to its high-level APIs, ``pkg_resources`` also includes several
-generally-useful utility routines.  These routines are used to implement the
-high-level APIs, but can also be quite useful by themselves.
-
-
-Parsing Utilities
------------------
-
-``parse_version(version)``
-    Parse a project's version string, returning a value that can be used to
-    compare versions by chronological order.  Semantically, the format is a
-    rough cross between distutils' ``StrictVersion`` and ``LooseVersion``
-    classes; if you give it versions that would work with ``StrictVersion``,
-    then they will compare the same way.  Otherwise, comparisons are more like
-    a "smarter" form of ``LooseVersion``.  It is *possible* to create
-    pathological version coding schemes that will fool this parser, but they
-    should be very rare in practice.
-
-    The returned value will be a tuple of strings.  Numeric portions of the
-    version are padded to 8 digits so they will compare numerically, but
-    without relying on how numbers compare relative to strings.  Dots are
-    dropped, but dashes are retained.  Trailing zeros between alpha segments
-    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
-    "2.4". Alphanumeric parts are lower-cased.
-
-    The algorithm assumes that strings like "-" and any alpha string that
-    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
-    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
-    considered newer than "2.4-1", which in turn is newer than "2.4".
-
-    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
-    come before "final" alphabetically) are assumed to be pre-release versions,
-    so that the version "2.4" is considered newer than "2.4a1".  Any "-"
-    characters preceding a pre-release indicator are removed.  (In versions of
-    setuptools prior to 0.6a9, "-" characters were not removed, leading to the
-    unintuitive result that "0.2-rc1" was considered a newer version than
-    "0.2".)
-
-    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
-    "rc" are treated as if they were "c", i.e. as though they were release
-    candidates, and therefore are not as new as a version string that does not
-    contain them.  And the string "dev" is treated as if it were an "@" sign;
-    that is, a version coming before even "a" or "alpha".
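-
-    For example, a few comparisons that follow from the rules above::
-
-        from pkg_resources import parse_version
-
-        assert parse_version("2.4") == parse_version("2.4.0")
-        assert parse_version("2.4a1") < parse_version("2.4")
-        assert parse_version("2.4-1") > parse_version("2.4")
-        assert parse_version("2.4.1") > parse_version("2.4-1")
-        assert parse_version("0.2rc1") < parse_version("0.2")
-        assert parse_version("1.0.dev456") < parse_version("1.0a1")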
-
-.. _yield_lines():
-
-``yield_lines(strs)``
-    Yield non-empty/non-comment lines from a string/unicode or a possibly-
-    nested sequence thereof.  If `strs` is an instance of ``basestring``, it
-    is split into lines, and each non-blank, non-comment line is yielded after
-    stripping leading and trailing whitespace.  (Lines whose first non-blank
-    character is ``#`` are considered comment lines.)
-
-    If `strs` is not an instance of ``basestring``, it is iterated over, and
-    each item is passed recursively to ``yield_lines()``, so that an arbitrarily
-    nested sequence of strings, or sequences of sequences of strings can be
-    flattened out to the lines contained therein.  So for example, passing
-    a file object or a list of strings to ``yield_lines`` will both work.
-    (Note that between each string in a sequence of strings there is assumed to
-    be an implicit line break, so lines cannot bridge two strings in a
-    sequence.)
-
-    This routine is used extensively by ``pkg_resources`` to parse metadata
-    and file formats of various kinds, and most other ``pkg_resources``
-    parsing functions that yield multiple values will use it to break up their
-    input.  However, this routine is idempotent, so calling ``yield_lines()``
-    on the output of another call to ``yield_lines()`` is completely harmless.
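-
-    For example::
-
-        from pkg_resources import yield_lines
-
-        text = "# a comment\nfirst line\n\n   second line\n"
-        print(list(yield_lines(text)))           # ['first line', 'second line']
-        print(list(yield_lines(["a\nb", "c"])))  # ['a', 'b', 'c']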
-
-``split_sections(strs)``
-    Split a string (or possibly-nested iterable thereof), yielding ``(section,
-    content)`` pairs found using an ``.ini``-like syntax.  Each ``section`` is
-    a whitespace-stripped version of the section name ("``[section]``")
-    and each ``content`` is a list of stripped lines excluding blank lines and
-    comment-only lines.  If there are any non-blank, non-comment lines before
-    the first section header, they're yielded in a first ``section`` of
-    ``None``.
-
-    This routine uses ``yield_lines()`` as its front end, so you can pass in
-    anything that ``yield_lines()`` accepts, such as an open text file, string,
-    or sequence of strings.  ``ValueError`` is raised if a malformed section
-    header is found (i.e. a line starting with ``[`` but not ending with
-    ``]``).
-
-    Note that this simplistic parser assumes that any line whose first nonblank
-    character is ``[`` is a section heading, so it can't support .ini format
-    variations that allow ``[`` as the first nonblank character on other lines.
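-
-    For example::
-
-        from pkg_resources import split_sections
-
-        sample = "default stuff\n\n[console_scripts]\nfoo = foo.cli:main\n"
-        for section, content in split_sections(sample):
-            print((section, content))
-        # prints (None, ['default stuff'])
-        # then   ('console_scripts', ['foo = foo.cli:main'])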
-
-``safe_name(name)``
-    Return a "safe" form of a project's name, suitable for use in a
-    ``Requirement`` string, as a distribution name, or a PyPI project name.
-    All non-alphanumeric runs are condensed to single "-" characters, such that
-    a name like "The $$$ Tree" becomes "The-Tree".  Note that if you are
-    generating a filename from this value you should combine it with a call to
-    ``to_filename()`` so all dashes ("-") are replaced by underscores ("_").
-    See ``to_filename()``.
-
-``safe_version(version)``
-    Similar to ``safe_name()`` except that spaces in the input become dots, and
-    dots are allowed to exist in the output.  As with ``safe_name()``, if you
-    are generating a filename from this you should replace any "-" characters
-    in the output with underscores.
-
-``safe_extra(extra)``
-    Return a "safe" form of an extra's name, suitable for use in a requirement
-    string or a setup script's ``extras_require`` keyword.  This routine is
-    similar to ``safe_name()`` except that non-alphanumeric runs are replaced
-    by a single underbar (``_``), and the result is lowercased.
-
-``to_filename(name_or_version)``
-    Escape a name or version string so it can be used in a dash-separated
-    filename (or ``#egg=name-version`` tag) without ambiguity.  You
-    should only pass in values that were returned by ``safe_name()`` or
-    ``safe_version()``.
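-
-A few examples of these helpers in action (the expected results shown in the
-comments follow from the rules above)::
-
-    from pkg_resources import safe_name, safe_version, safe_extra, to_filename
-
-    name = safe_name("The $$$ Tree")         # 'The-Tree'
-    version = safe_version("1.0 beta 2")     # '1.0.beta.2'
-    extra = safe_extra("Reporting/PDF")      # 'reporting_pdf'
-    fname = to_filename("Foo-Bar-1.0")       # 'Foo_Bar_1.0'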
-
-
-Platform Utilities
-------------------
-
-``get_build_platform()``
-    Return this platform's identifier string.  For Windows, the return value
-    is ``"win32"``, and for Mac OS X it is a string of the form
-    ``"macosx-10.4-ppc"``.  All other platforms return the same uname-based
-    string that the ``distutils.util.get_platform()`` function returns.
-    This string is the minimum platform version required by distributions built
-    on the local machine.  (Backward compatibility note: setuptools versions
-    prior to 0.6b1 called this function ``get_platform()``, and the function is
-    still available under that name for backward compatibility reasons.)
-
-``get_supported_platform()`` (New in 0.6b1)
-    This is similar to ``get_build_platform()``, but is the maximum
-    platform version that the local machine supports.  You will usually want
-    to use this value as the ``provided`` argument to the
-    ``compatible_platforms()`` function.
-
-``compatible_platforms(provided, required)``
-    Return true if a distribution built on the `provided` platform may be used
-    on the `required` platform.  If either platform value is ``None``, it is
-    considered a wildcard, and the platforms are therefore compatible.
-    Likewise, if the platform strings are equal, they're also considered
-    compatible, and ``True`` is returned.  Currently, the only non-equal
-    platform strings that are considered compatible are Mac OS X platform
-    strings with the same hardware type (e.g. ``ppc``) and major version
-    (e.g. ``10``) with the `provided` platform's minor version being less than
-    or equal to the `required` platform's minor version.
-
-``get_default_cache()``
-    Determine the default cache location for extracting resources from zipped
-    eggs.  This routine returns the value of the ``PYTHON_EGG_CACHE``
-    environment variable, if set.  Otherwise, on Windows, it returns a
-    "Python-Eggs" subdirectory of the user's "Application Data" directory;
-    on all other systems, it returns ``os.path.expanduser("~/.python-eggs")``.
-
-
-PEP 302 Utilities
------------------
-
-``get_importer(path_item)``
-    Retrieve a PEP 302 "importer" for the given path item (which need not
-    actually be on ``sys.path``).  This routine simulates the PEP 302 protocol
-    for obtaining an "importer" object.  It first checks for an importer for
-    the path item in ``sys.path_importer_cache``, and if not found it calls
-    each of the ``sys.path_hooks`` and caches the result if a good importer is
-    found.  If no importer is found, this routine returns an ``ImpWrapper``
-    instance that wraps the builtin import machinery as a PEP 302-compliant
-    "importer" object.  This ``ImpWrapper`` is *not* cached; instead a new
-    instance is returned each time.
-
-    (Note: When run under Python 2.5, this function is simply an alias for
-    ``pkgutil.get_importer()``, and instead of ``pkg_resources.ImpWrapper``
-    instances, it may return ``pkgutil.ImpImporter`` instances.)
-
-
-File/Path Utilities
--------------------
-
-``ensure_directory(path)``
-    Ensure that the parent directory (``os.path.dirname``) of `path` actually
-    exists, using ``os.makedirs()`` if necessary.
-
-``normalize_path(path)``
-    Return a "normalized" version of `path`, such that two paths represent
-    the same filesystem location if they have equal ``normalize_path()``
-    values.  Specifically, this is a shortcut for calling ``os.path.realpath``
-    and ``os.path.normcase`` on `path`.  Unfortunately, on certain platforms
-    (notably Cygwin and Mac OS X) the ``normcase`` function does not accurately
-    reflect the platform's case-sensitivity, so there is always the possibility
-    of two apparently-different paths being equal on such platforms.
-
-History
--------
-
-0.6c9
- * Fix ``resource_listdir('')`` always returning an empty list for zipped eggs.
-
-0.6c7
- * Fix package precedence problem where single-version eggs installed in
-   ``site-packages`` would take precedence over ``.egg`` files (or directories)
-   installed in ``site-packages``.
-
-0.6c6
- * Fix extracted C extensions not having executable permissions under Cygwin.
-
- * Allow ``.egg-link`` files to contain relative paths.
-
- * Fix cache dir defaults on Windows when multiple environment vars are needed
-   to construct a path.
-
-0.6c4
- * Fix "dev" versions being considered newer than release candidates.
-
-0.6c3
- * Python 2.5 compatibility fixes.
-
-0.6c2
- * Fix a problem with eggs specified directly on ``PYTHONPATH`` on
-   case-insensitive filesystems possibly not showing up in the default
-   working set, due to differing normalizations of ``sys.path`` entries.
-
-0.6b3
- * Fixed a duplicate path insertion problem on case-insensitive filesystems.
-
-0.6b1
- * Split ``get_platform()`` into ``get_supported_platform()`` and
-   ``get_build_platform()`` to work around a Mac versioning problem that caused
-   the behavior of ``compatible_platforms()`` to be platform specific.
-
- * Fix entry point parsing when a standalone module name has whitespace
-   between it and the extras.
-
-0.6a11
- * Added ``ExtractionError`` and ``ResourceManager.extraction_error()`` so that
-   cache permission problems get a more user-friendly explanation of the
-   problem, and so that programs can catch and handle extraction errors if they
-   need to.
-
-0.6a10
- * Added the ``extras`` attribute to ``Distribution``, the ``find_plugins()``
-   method to ``WorkingSet``, and the ``__add__()`` and ``__iadd__()`` methods
-   to ``Environment``.
-
- * ``safe_name()`` now allows dots in project names.
-
- * There is a new ``to_filename()`` function that escapes project names and
-   versions for safe use in constructing egg filenames from a Distribution
-   object's metadata.
-
- * Added ``Distribution.clone()`` method, and keyword argument support to other
-   ``Distribution`` constructors.
-
- * Added the ``DEVELOP_DIST`` precedence, and automatically assign it to
-   eggs using ``.egg-info`` format.
-
-0.6a9
- * Don't raise an error when an invalid (unfinished) distribution is found
-   unless absolutely necessary.  Warn about skipping invalid/unfinished eggs
-   when building an Environment.
-
- * Added support for ``.egg-info`` files or directories with version/platform
-   information embedded in the filename, so that system packagers have the
-   option of including ``PKG-INFO`` files to indicate the presence of a
-   system-installed egg, without needing to use ``.egg`` directories, zipfiles,
-   or ``.pth`` manipulation.
-
- * Changed ``parse_version()`` to remove dashes before pre-release tags, so
-   that ``0.2-rc1`` is considered an *older* version than ``0.2``, and is equal
-   to ``0.2rc1``.  The idea that a dash *always* meant a post-release version
-   was highly non-intuitive to setuptools users and Python developers, who
-   seem to want to use ``-rc`` version numbers a lot.
-
-0.6a8
- * Fixed a problem with ``WorkingSet.resolve()`` that prevented version
-   conflicts from being detected at runtime.
-
- * Improved runtime conflict warning message to identify a line in the user's
-   program, rather than flagging the ``warn()`` call in ``pkg_resources``.
-
- * Avoid giving runtime conflict warnings for namespace packages, even if they
-   were declared by a different package than the one currently being activated.
-
- * Fix path insertion algorithm for case-insensitive filesystems.
-
- * Fixed a problem with nested namespace packages (e.g. ``peak.util``) not
-   being set as an attribute of their parent package.
-
-0.6a6
- * Activated distributions are now inserted in ``sys.path`` (and the working
-   set) just before the directory that contains them, instead of at the end.
-   This allows e.g. eggs in ``site-packages`` to override unmanaged modules in
-   the same location, and allows eggs found earlier on ``sys.path`` to override
-   ones found later.
-
- * When a distribution is activated, it now checks whether any contained
-   non-namespace modules have already been imported and issues a warning if
-   a conflicting module has already been imported.
-
- * Changed dependency processing so that it's breadth-first, allowing a
-   depender's preferences to override those of a dependee, to prevent conflicts
-   when a lower version is acceptable to the dependee, but not the depender.
-
- * Fixed a problem extracting zipped files on Windows, when the egg in question
-   has had changed contents but still has the same version number.
-
-0.6a4
- * Fix a bug in ``WorkingSet.resolve()`` that was introduced in 0.6a3.
-
-0.6a3
- * Added ``safe_extra()`` parsing utility routine, and use it for Requirement,
-   EntryPoint, and Distribution objects' extras handling.
-
-0.6a1
- * Enhanced performance of ``require()`` and related operations when all
-   requirements are already in the working set, and enhanced performance of
-   directory scanning for distributions.
-
- * Fixed some problems using ``pkg_resources`` w/PEP 302 loaders other than
-   ``zipimport``, and the previously-broken "eager resource" support.
-
- * Fixed ``pkg_resources.resource_exists()`` not working correctly, along with
-   some other resource API bugs.
-
- * Many API changes and enhancements:
-
-   * Added ``EntryPoint``, ``get_entry_map``, ``load_entry_point``, and
-     ``get_entry_info`` APIs for dynamic plugin discovery.
-
-   * ``list_resources`` is now ``resource_listdir`` (and it actually works)
-
-   * Resource API functions like ``resource_string()`` that accepted a package
-     name and resource name, will now also accept a ``Requirement`` object in
-     place of the package name (to allow access to non-package data files in
-     an egg).
-
-   * ``get_provider()`` will now accept a ``Requirement`` instance or a module
-     name.  If it is given a ``Requirement``, it will return a corresponding
-     ``Distribution`` (by calling ``require()`` if a suitable distribution
-     isn't already in the working set), rather than returning a metadata and
-     resource provider for a specific module.  (The difference is in how
-     resource paths are interpreted; supplying a module name means resource
-     paths will be module-relative, rather than relative to the distribution's
-     root.)
-
-   * ``Distribution`` objects now implement the ``IResourceProvider`` and
-     ``IMetadataProvider`` interfaces, so you don't need to reference the (no
-     longer available) ``metadata`` attribute to get at these interfaces.
-
-   * ``Distribution`` and ``Requirement`` both have a ``project_name``
-     attribute for the project name they refer to.  (Previously these were
-     ``name`` and ``distname`` attributes.)
-
-   * The ``path`` attribute of ``Distribution`` objects is now ``location``,
-     because it isn't necessarily a filesystem path (and hasn't been for some
-     time now).  The ``location`` of ``Distribution`` objects in the filesystem
-     should always be normalized using ``pkg_resources.normalize_path()``; all
-     of the setuptools and EasyInstall code that generates distributions from
-     the filesystem (including ``Distribution.from_filename()``) ensures this
-     invariant, but if you use a more generic API like ``Distribution()`` or
-     ``Distribution.from_location()`` you should take care that you don't
-     create a distribution with an un-normalized filesystem path.
-
-   * ``Distribution`` objects now have an ``as_requirement()`` method that
-     returns a ``Requirement`` for the distribution's project name and version.
-
-   * Distribution objects no longer have an ``installed_on()`` method, and the
-     ``install_on()`` method is now ``activate()`` (but may go away altogether
-     soon).  The ``depends()`` method has also been renamed to ``requires()``,
-     and ``InvalidOption`` is now ``UnknownExtra``.
-
-   * ``find_distributions()`` now takes an additional argument called ``only``,
-     that tells it to only yield distributions whose location is the passed-in
-     path.  (It defaults to False, so that the default behavior is unchanged.)
-
-   * ``AvailableDistributions`` is now called ``Environment``, and the
-     ``get()``, ``__len__()``, and ``__contains__()`` methods were removed,
-     because they weren't particularly useful.  ``__getitem__()`` no longer
-     raises ``KeyError``; it just returns an empty list if there are no
-     distributions for the named project.
-
-   * The ``resolve()`` method of ``Environment`` is now a method of
-     ``WorkingSet`` instead, and the ``best_match()`` method now uses a working
-     set instead of a path list as its second argument.
-
-   * There is a new ``pkg_resources.add_activation_listener()`` API that lets
-     you register a callback for notifications about distributions added to
-     ``sys.path`` (including the distributions already on it).  This is
-     basically a hook for extensible applications and frameworks to be able to
-     search for plugin metadata in distributions added at runtime.
-
-0.5a13
- * Fixed a bug in resource extraction from nested packages in a zipped egg.
-
-0.5a12
- * Updated extraction/cache mechanism for zipped resources to avoid inter-
-   process and inter-thread races during extraction.  The default cache
-   location can now be set via the ``PYTHON_EGG_CACHE`` environment variable,
-   and the default Windows cache is now a ``Python-Eggs`` subdirectory of the
-   current user's "Application Data" directory, if the ``PYTHON_EGGS_CACHE``
-   variable isn't set.
-
-0.5a10
- * Fix a problem with ``pkg_resources`` being confused by non-existent eggs on
-   ``sys.path`` (e.g. if a user deletes an egg without removing it from the
-   ``easy-install.pth`` file).
-
- * Fix a problem with "basket" support in ``pkg_resources``, where egg-finding
-   never actually went inside ``.egg`` files.
-
- * Made ``pkg_resources`` import the module you request resources from, if it's
-   not already imported.
-
-0.5a4
- * ``pkg_resources.AvailableDistributions.resolve()`` and related methods now
-   accept an ``installer`` argument: a callable taking one argument, a
-   ``Requirement`` instance.  The callable must return a ``Distribution``
-   object, or ``None`` if no distribution is found.  This feature is used by
-   EasyInstall to resolve dependencies by recursively invoking itself.
-
-0.4a4
- * Fix problems with ``resource_listdir()``, ``resource_isdir()`` and resource
-   directory extraction for zipped eggs.
-
-0.4a3
- * Fixed scripts not being able to see a ``__file__`` variable in ``__main__``.
-
- * Fixed a problem with ``resource_isdir()`` implementation that was introduced
-   in 0.4a2.
-
-0.4a1
- * Fixed a bug in requirements processing for exact versions (i.e. ``==`` and
-   ``!=``) when only one condition was included.
-
- * Added ``safe_name()`` and ``safe_version()`` APIs to clean up handling of
-   arbitrary distribution names and versions found on PyPI.
-
-0.3a4
- * ``pkg_resources`` now supports resource directories, not just the resources
-   in them.  In particular, there are ``resource_listdir()`` and
-   ``resource_isdir()`` APIs.
-
- * ``pkg_resources`` now supports "egg baskets" -- .egg zipfiles which contain
-   multiple distributions in subdirectories whose names end with ``.egg``.
-   Having such a "basket" in a directory on ``sys.path`` is equivalent to
-   having the individual eggs in that directory, but the contained eggs can
-   be individually added (or not) to ``sys.path``.  Currently, however, there
-   is no automated way to create baskets.
-
- * Namespace package manipulation is now protected by the Python import lock.
-
-0.3a1
- * Initial release.
-
diff --git a/vendor/distribute-0.6.34/docs/python3.txt b/vendor/distribute-0.6.34/docs/python3.txt
deleted file mode 100644
index 2f6cde4ab35ec9ddfd3d551310cac2586091ab46..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/python3.txt
+++ /dev/null
@@ -1,121 +0,0 @@
-=====================================================
-Supporting both Python 2 and Python 3 with Distribute
-=====================================================
-
-Starting with version 0.6.2, Distribute supports Python 3. Installing and
-using Distribute for Python 3 code works exactly the same as for Python 2
-code, but Distribute also helps you support Python 2 and Python 3 from the
-same source code by letting you run 2to3 on the code as part of the build
-process; you enable this by setting the keyword parameter ``use_2to3`` to True.
-
-
-Distribute as help during porting
-=================================
-
-Distribute can make the porting process much easier by automatically running
-2to3 as part of running the tests. To do this, you need to configure your
-setup.py so that you can run the unit tests with ``python setup.py test``.
-
-See :ref:`test` for more information on this.
-
-Once you have the tests running under Python 2, you can add the ``use_2to3``
-keyword parameter to setup(), and start running the tests under Python 3.
-The test command will now first run the build command, during which the code
-will be converted with 2to3, and the tests will then be run from the build
-directory, as opposed to the source directory as is normally done.
-
-Distribute will convert all Python files, and also all doctests in Python
-files. However, if you have doctests located in separate text files, these
-will not automatically be converted. If you add them to the
-``convert_2to3_doctests`` keyword parameter, Distribute will convert them as
-well.
-
-By default, the conversion uses all fixers in the ``lib2to3.fixes`` package.
-To use additional fixers, the parameter ``use_2to3_fixers`` can be set to a
-list of names of packages containing fixers. To exclude fixers, the parameter
-``use_2to3_exclude_fixers`` can be set to a list of fixer names to be
-skipped.
-
-A typical setup.py can look something like this::
-
-    from setuptools import setup
-
-    setup(
-        name='your.module',
-        version = '1.0',
-        description='This is your awesome module',
-        author='You',
-        author_email='your@email',
-        package_dir = {'': 'src'},
-        packages = ['your', 'your.module'],
-        test_suite = 'your.module.tests',
-        use_2to3 = True,
-        convert_2to3_doctests = ['src/your/module/README.txt'],
-        use_2to3_fixers = ['your.fixers'],
-        use_2to3_exclude_fixers = ['lib2to3.fixes.fix_import'],
-    )
-
-Differential conversion
------------------------
-
-Note that a file will only be copied and converted during the build process
-if the source file has been changed. If you add a file to the doctests
-that should be converted, it will not be converted the next time you run
-the tests, since it hasn't been modified; you need to remove it from the
-build directory. Also, if you ran the build, install or test commands before
-adding the ``use_2to3`` parameter, you will have to remove the build directory
-before you run the test command, as the files will otherwise appear up to
-date and no conversion will happen.
-
-In general, if code doesn't seem to be converted, deleting the build directory
-and trying again is a good safeguard against the build directory getting
-"out of sync" with the source directory.
-
-Distributing Python 3 modules
-=============================
-
-You can distribute your modules with Python 3 support in different ways. A
-normal source distribution will work, but can be slow to install, as the
-2to3 process will be run during the install. You can also distribute
-the module in a binary format, such as a binary egg. That egg will contain the
-already-converted code, and hence no 2to3 conversion is needed during install.
-
-Advanced features
-=================
-
-If you don't want to run the 2to3 conversion on the doctests in Python files,
-you can turn that off by setting ``setuptools.use_2to3_on_doctests = False``.
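-
-A minimal sketch, using the flag name exactly as documented above (set it at
-module level in setup.py, before ``setup()`` runs; the project details are
-placeholders)::
-
-    import setuptools
-    from setuptools import setup, find_packages
-
-    # keep 2to3 from rewriting the doctests embedded in .py files:
-    setuptools.use_2to3_on_doctests = False
-
-    setup(
-        name='your.module',
-        version='1.0',
-        packages=find_packages(),
-        use_2to3=True,
-    )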
-
-Note on compatibility with setuptools
-=====================================
-
-Setuptools does not know about the new keyword parameters that support Python 3.
-As a result it will warn about the unknown keyword parameters if you use
-setuptools instead of Distribute under Python 2. This is not an error, and the
-install process will continue as normal, but if you want to get rid of that
-warning it is easy: simply add the new parameters conditionally to an extra
-dict and pass that dict into setup()::
-
-    from setuptools import setup
-    import sys
-
-    extra = {}
-    if sys.version_info >= (3,):
-        extra['use_2to3'] = True
-        extra['convert_2to3_doctests'] = ['src/your/module/README.txt']
-        extra['use_2to3_fixers'] = ['your.fixers']
-
-    setup(
-        name='your.module',
-        version = '1.0',
-        description='This is your awesome module',
-        author='You',
-        author_email='your@email',
-        package_dir = {'': 'src'},
-        packages = ['your', 'your.module'],
-        test_suite = 'your.module.tests',
-        **extra
-    )
-
-This way the parameters will only be used under Python 3, where you have to
-use Distribute.
diff --git a/vendor/distribute-0.6.34/docs/roadmap.txt b/vendor/distribute-0.6.34/docs/roadmap.txt
deleted file mode 100644
index ea5070eaaf8797a273928df2b3791ac669920cdb..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/roadmap.txt
+++ /dev/null
@@ -1,86 +0,0 @@
-=======
-Roadmap
-=======
-
-Distribute has two branches:
-
-- 0.6.x : provides a Setuptools-0.6cX compatible version
-- 0.7.x : will provide a refactoring
-
-0.6.x
-=====
-
-Not "much" is going to happen here, we want this branch to be helpful
-to the community *today* by addressing the 40-or-so bugs
-that were found in Setuptools and never fixed. This is eventually
-happen soon because its development is
-fast : there are up to 5 commiters that are working on it very often
-(and the number grows weekly.)
-
-The biggest issue with this branch is that it provides the same
-packages and modules setuptools does, and this
-requires some bootstrapping work to make sure that, once Distribute is
-installed, all distributions that require Setuptools
-will continue to work. This is done by faking the metadata of
-Setuptools 0.6c9; that's the only way we found to do this.
-
-There's one major thing though: thanks to the work of Lennart, Alex,
-and Martin, this branch supports Python 3,
-which is great to have, as it speeds up Py3 adoption.
-
-The goal of 0.6.x is to remove as many bugs as we can, and, if
-possible, to remove the patches applied
-to Distutils. We will maintain 0.6.x for years and we will
-promote its usage everywhere instead of
-Setuptools.
-
-Some new commands are added there, when they are helpful and don't
-interact with the rest. One example is
-"upload_docs", which lets you upload documentation to PyPI. The
-goal is to move it to Distutils
-at some point, if the documentation feature of PyPI stays and starts to be used.
-
-0.7.x
-=====
-
-We've started to refactor Distribute with this roadmap in mind (and
-no, as someone said, it's not vaporware;
-we've done a lot already):
-
-- 0.7.x can be installed and used with 0.6.x
-
-- easy_install is going to be deprecated! Use Pip!
-
-- the version system will be deprecated, in favor of the one in Distutils
-
-- no more Distutils monkey-patch that happens once you use the code
-  (things like 'from distutils import cmd; cmd.Command = CustomCommand')
-
-- no more custom site.py (that is: if something is missing in Python's
-  site.py, we'll add it there instead of patching it)
-
-- no more namespace packages system, if PEP 382 (namespace package
-  support) makes it into 2.7
-
-- The code is split into many packages and might be distributed under
-  several distributions.
-
- - distribute.resources: that's the old pkg_resources, but
-   reorganized into clean, PEP 8-compliant modules. This package will
-   only contain the query APIs and will focus on being PEP 376
-   compatible. We will promote its usage and see if Pip wants
-   to use it as a basis.
-   It will probably shrink a lot though, once the stdlib provides PEP 376 support.
-
- - distribute.entrypoints: that's the old pkg_resources entry points
-   system, but on its own. It uses distribute.resources.
-
- - distribute.index: that's package_index and a few other things:
-   everything required to interact with PyPI. We will promote
-   its usage and see if Pip wants to use it as a basis.
-
- - distribute.core (might be renamed to main): that's everything
-   else, and uses the other packages.
-
-Goal: A first release before (or when) Python 2.7 / 3.2 is out.
-
diff --git a/vendor/distribute-0.6.34/docs/setuptools.txt b/vendor/distribute-0.6.34/docs/setuptools.txt
deleted file mode 100644
index 31ecc931f259d3d4d13ceed1663b8e6aa088b0ca..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/setuptools.txt
+++ /dev/null
@@ -1,3236 +0,0 @@
-==================================================
-Building and Distributing Packages with Distribute
-==================================================
-
-``Distribute`` is a collection of enhancements to the Python ``distutils``
-(for Python 2.3.5 and up on most platforms; 64-bit platforms require a minimum
-of Python 2.4) that allow you to more easily build and distribute Python
-packages, especially ones that have dependencies on other packages.
-
-Packages built and distributed using ``setuptools`` look to the user like
-ordinary Python packages based on the ``distutils``.  Your users don't need to
-install or even know about setuptools in order to use them, and you don't
-have to include the entire setuptools package in your distributions.  By
-including just a single `bootstrap module`_ (an 8K .py file), your package will
-automatically download and install ``setuptools`` if the user is building your
-package from source and doesn't have a suitable version already installed.
-
-.. _bootstrap module: http://nightly.ziade.org/distribute_setup.py
-
-Feature Highlights:
-
-* Automatically find/download/install/upgrade dependencies at build time using
-  the `EasyInstall tool <http://peak.telecommunity.com/DevCenter/EasyInstall>`_,
-  which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
-  automatically scans web pages linked from PyPI to find download links.  (It's
-  the closest thing to CPAN currently available for Python.)
-
-* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
-  a single-file importable distribution format
-
-* Include data files inside your package directories, where your code can
-  actually use them.  (Python 2.4 distutils also supports this feature, but
-  setuptools provides the feature for Python 2.3 packages also, and supports
-  accessing data files in zipped packages too.)
-
-* Automatically include all packages in your source tree, without listing them
-  individually in setup.py
-
-* Automatically include all relevant files in your source distributions,
-  without needing to create a ``MANIFEST.in`` file, and without having to force
-  regeneration of the ``MANIFEST`` file when your source tree changes.
-
-* Automatically generate wrapper scripts or Windows (console and GUI) .exe
-  files for any number of "main" functions in your project.  (Note: this is not
-  a py2exe replacement; the .exe files rely on the local Python installation.)
-
-* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
-  still work even when the end-user doesn't have Pyrex installed (as long as
-  you include the Pyrex-generated C in your source distribution)
-
-* Command aliases - create project-specific, per-user, or site-wide shortcut
-  names for commonly used commands and options
-
-* PyPI upload support - upload your source distributions and eggs to PyPI
-
-* Deploy your project in "development mode", such that it's available on
-  ``sys.path``, yet can still be edited directly from its source checkout.
-
-* Easily extend the distutils with new commands or ``setup()`` arguments, and
-  distribute/reuse your extensions for multiple projects, without copying code.
-
-* Create extensible applications and frameworks that automatically discover
-  extensions, using simple "entry points" declared in a project's setup script.
-
-In addition to the PyPI downloads, the development version of ``setuptools``
-is available from the `Python SVN sandbox`_, and in-development versions of the
-`0.6 branch`_ are available as well.
-
-.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
-
-.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
-
-.. contents:: **Table of Contents**
-
-.. _distribute_setup.py: `bootstrap module`_
-
-
------------------
-Developer's Guide
------------------
-
-
-Installing ``setuptools``
-=========================
-
-Please follow the `EasyInstall Installation Instructions`_ to install the
-current stable version of setuptools.  In particular, be sure to read the
-section on `Custom Installation Locations`_ if you are installing anywhere
-other than Python's ``site-packages`` directory.
-
-.. _EasyInstall Installation Instructions: http://peak.telecommunity.com/DevCenter/EasyInstall#installation-instructions
-
-.. _Custom Installation Locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
-
-If you want the current in-development version of setuptools, you should first
-install a stable version, and then run::
-
-    distribute_setup.py setuptools==dev
-
-This will download and install the latest development (i.e. unstable) version
-of setuptools from the Python Subversion sandbox.
-
-
-Basic Use
-=========
-
-For basic use of setuptools, just import things from setuptools instead of
-the distutils.  Here's a minimal setup script using setuptools::
-
-    from setuptools import setup, find_packages
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages(),
-    )
-
-As you can see, it doesn't take much to use setuptools in a project.
-Just by doing the above, this project will be able to produce eggs, upload to
-PyPI, and automatically include all packages in the directory where the
-setup.py lives.  See the `Command Reference`_ section below to see what
-commands you can give to this setup script.
-
-Of course, before you release your project to PyPI, you'll want to add a bit
-more information to your setup script to help people find or learn about your
-project.  And maybe your project will have grown by then to include a few
-dependencies, and perhaps some data files and scripts::
-
-    from setuptools import setup, find_packages
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages(),
-        scripts = ['say_hello.py'],
-
-        # Project uses reStructuredText, so ensure that the docutils get
-        # installed or upgraded on the target machine
-        install_requires = ['docutils>=0.3'],
-
-        package_data = {
-            # If any package contains *.txt or *.rst files, include them:
-            '': ['*.txt', '*.rst'],
-            # And include any *.msg files found in the 'hello' package, too:
-            'hello': ['*.msg'],
-        },
-
-        # metadata for upload to PyPI
-        author = "Me",
-        author_email = "me@example.com",
-        description = "This is an Example Package",
-        license = "PSF",
-        keywords = "hello world example examples",
-        url = "http://example.com/HelloWorld/",   # project home page, if any
-
-        # could also include long_description, download_url, classifiers, etc.
-    )
-
-In the sections that follow, we'll explain what most of these ``setup()``
-arguments do (except for the metadata ones), and the various ways you might use
-them in your own project(s).
-
-
-Specifying Your Project's Version
----------------------------------
-
-Setuptools can work well with most versioning schemes; there are, however, a
-few special things to watch out for, in order to ensure that setuptools and
-EasyInstall can always tell what version of your package is newer than another
-version.  Knowing these things will also help you correctly specify what
-versions of other projects your project depends on.
-
-A version consists of an alternating series of release numbers and pre-release
-or post-release tags.  A release number is a series of digits punctuated by
-dots, such as ``2.4`` or ``0.5``.  Each series of digits is treated
-numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the
-same release number, denoting the first subrelease of release 2.  But  ``2.10``
-is the *tenth* subrelease of release 2, and so is a different and newer release
-from ``2.1`` or ``2.1.0``.  Leading zeros within a series of digits are also
-ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``.
-
-Following a release number, you can have either a pre-release or post-release
-tag.  Pre-release tags make a version be considered *older* than the version
-they are appended to.  So, revision ``2.4`` is *newer* than revision ``2.4c1``,
-which in turn is newer than ``2.4b1`` or ``2.4a1``.  Postrelease tags make
-a version be considered *newer* than the version they are appended to.  So,
-revisions like ``2.4-1`` and ``2.4pl3`` are newer than ``2.4``, but are *older*
-than ``2.4.1`` (which has a higher release number).
-
-A pre-release tag is a series of letters that are alphabetically before
-"final".  Some examples of prerelease tags would include ``alpha``, ``beta``,
-``a``, ``c``, ``dev``, and so on.  You do not have to place a dot before
-the prerelease tag if it's immediately after a number, but it's okay to do
-so if you prefer.  Thus, ``2.4c1`` and ``2.4.c1`` both represent release
-candidate 1 of version ``2.4``, and are treated as identical by setuptools.
-
-In addition, there are three special prerelease tags that are treated as if
-they were the letter ``c``: ``pre``, ``preview``, and ``rc``.  So, version
-``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as
-``2.4c1``, and are treated as identical by setuptools.
-
-A post-release tag is either a series of letters that are alphabetically
-greater than or equal to "final", or a dash (``-``).  Post-release tags are
-generally used to separate patch numbers, port numbers, build numbers, revision
-numbers, or date stamps from the release number.  For example, the version
-``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of
-version ``2.4``.  Or you might use ``2.4-20051127`` to denote a date-stamped
-post-release.
-
-Notice that after each pre or post-release tag, you are free to place another
-release number, followed again by more pre- or post-release tags.  For example,
-``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the in-
-development version of the ninth alpha of release 0.6.  Notice that ``dev`` is
-a pre-release tag, so this version is a *lower* version number than ``0.6a9``,
-which would be the actual ninth alpha of release 0.6.  But the ``-r41475`` is
-a post-release tag, so this version is *newer* than ``0.6a9.dev``.
-
-For the most part, setuptools' interpretation of version numbers is intuitive,
-but here are a few tips that will keep you out of trouble in the corner cases:
-
-* Don't use ``-`` or any other character than ``.`` as a separator, unless you
-  really want a post-release.  Remember that ``2.1-rc2`` means you've
-  *already* released ``2.1``, whereas ``2.1rc2`` and ``2.1.c2`` are candidates
-  you're putting out *before* ``2.1``.  If you accidentally distribute copies
-  of a post-release that you meant to be a pre-release, the only safe fix is to
-  bump your main release number (e.g. to ``2.1.1``) and re-release the project.
-
-* Don't stick adjoining pre-release tags together without a dot or number
-  between them.  Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``,
-  *not* a development pre-release of ``1.9a``.  Use ``.dev`` instead, as in
-  ``1.9a.dev``, or separate the prerelease tags with a number, as in
-  ``1.9a0dev``.  ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are
-  identical versions from setuptools' point of view, so you can use whatever
-  scheme you prefer.
-
-* If you want to be certain that your chosen numbering scheme works the way
-  you think it will, you can use the ``pkg_resources.parse_version()`` function
-  to compare different version numbers::
-
-    >>> from pkg_resources import parse_version
-    >>> parse_version('1.9.a.dev') == parse_version('1.9a0dev')
-    True
-    >>> parse_version('2.1-rc2') < parse_version('2.1')
-    False
-    >>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9')
-    True
-
-Once you've decided on a version numbering scheme for your project, you can
-have setuptools automatically tag your in-development releases with various
-pre- or post-release tags.  See the following sections for more details:
-
-* `Tagging and "Daily Build" or "Snapshot" Releases`_
-* `Managing "Continuous Releases" Using Subversion`_
-* The `egg_info`_ command
-
-
-New and Changed ``setup()`` Keywords
-====================================
-
-The following keyword arguments to ``setup()`` are added or changed by
-``setuptools``.  All of them are optional; you do not have to supply them
-unless you need the associated ``setuptools`` feature.
-
-``include_package_data``
-    If set to ``True``, this tells ``setuptools`` to automatically include any
-    data files it finds inside your package directories, that are either under
-    CVS or Subversion control, or which are specified by your ``MANIFEST.in``
-    file.  For more information, see the section below on `Including Data
-    Files`_.
-
-``exclude_package_data``
-    A dictionary mapping package names to lists of glob patterns that should
-    be *excluded* from your package directories.  You can use this to trim back
-    any excess files included by ``include_package_data``.  For a complete
-    description and examples, see the section below on `Including Data Files`_.
-
-``package_data``
-    A dictionary mapping package names to lists of glob patterns.  For a
-    complete description and examples, see the section below on `Including
-    Data Files`_.  You do not need to use this option if you are using
-    ``include_package_data``, unless you need to add e.g. files that are
-    generated by your setup script and build process.  (And are therefore not
-    in source control or are files that you don't want to include in your
-    source distribution.)
-
-``zip_safe``
-    A boolean (True or False) flag specifying whether the project can be
-    safely installed and run from a zip file.  If this argument is not
-    supplied, the ``bdist_egg`` command will have to analyze all of your
-    project's contents for possible problems each time it builds an egg.
-
-``install_requires``
-    A string or list of strings specifying what other distributions need to
-    be installed when this one is.  See the section below on `Declaring
-    Dependencies`_ for details and examples of the format of this argument.
-
-``entry_points``
-    A dictionary mapping entry point group names to strings or lists of strings
-    defining the entry points.  Entry points are used to support dynamic
-    discovery of services or plugins provided by a project.  See `Dynamic
-    Discovery of Services and Plugins`_ for details and examples of the format
-    of this argument.  In addition, this keyword is used to support `Automatic
-    Script Creation`_.
-
-``extras_require``
-    A dictionary mapping names of "extras" (optional features of your project)
-    to strings or lists of strings specifying what other distributions must be
-    installed to support those features.  See the section below on `Declaring
-    Dependencies`_ for details and examples of the format of this argument.
-
-``setup_requires``
-    A string or list of strings specifying what other distributions need to
-    be present in order for the *setup script* to run.  ``setuptools`` will
-    attempt to obtain these (even going so far as to download them using
-    ``EasyInstall``) before processing the rest of the setup script or commands.
-    This argument is needed if you are using distutils extensions as part of
-    your build process; for example, extensions that process setup() arguments
-    and turn them into EGG-INFO metadata files.
-
-    (Note: projects listed in ``setup_requires`` will NOT be automatically
-    installed on the system where the setup script is being run.  They are
-    simply downloaded to the setup directory if they're not locally available
-    already.  If you want them to be installed, as well as being available
-    when the setup script is run, you should add them to ``install_requires``
-    **and** ``setup_requires``.)
-
-``dependency_links``
-    A list of strings naming URLs to be searched when satisfying dependencies.
-    These links will be used if needed to install packages specified by
-    ``setup_requires`` or ``tests_require``.  They will also be written into
-    the egg's metadata, for tools like EasyInstall to use when installing
-    an ``.egg`` file.
-
-``namespace_packages``
-    A list of strings naming the project's "namespace packages".  A namespace
-    package is a package that may be split across multiple project
-    distributions.  For example, Zope 3's ``zope`` package is a namespace
-    package, because subpackages like ``zope.interface`` and ``zope.publisher``
-    may be distributed separately.  The egg runtime system can automatically
-    merge such subpackages into a single parent package at runtime, as long
-    as you declare them in each project that contains any subpackages of the
-    namespace package, and as long as the namespace package's ``__init__.py``
-    does not contain any code other than a namespace declaration.  See the
-    section below on `Namespace Packages`_ for more information.
-
-``test_suite``
-    A string naming a ``unittest.TestCase`` subclass (or a package or module
-    containing one or more of them, or a method of such a subclass), or naming
-    a function that can be called with no arguments and returns a
-    ``unittest.TestSuite``.  If the named suite is a module, and the module
-    has an ``additional_tests()`` function, it is called and the results are
-    added to the tests to be run.  If the named suite is a package, any
-    submodules and subpackages are recursively added to the overall test suite.
-
-    Specifying this argument enables use of the `test`_ command to run the
-    specified test suite, e.g. via ``setup.py test``.  See the section on the
-    `test`_ command below for more details, and the brief sketch of the
-    testing-related keywords that follows this list.
-
-``tests_require``
-    If your project's tests need one or more additional packages besides those
-    needed to install it, you can use this option to specify them.  It should
-    be a string or list of strings specifying what other distributions need to
-    be present for the package's tests to run.  When you run the ``test``
-    command, ``setuptools`` will  attempt to obtain these (even going
-    so far as to download them using ``EasyInstall``).  Note that these
-    required projects will *not* be installed on the system where the tests
-    are run, but only downloaded to the project's setup directory if they're
-    not already installed locally.
-
-.. _test_loader:
-
-``test_loader``
-    If you would like to use a different way of finding tests to run than what
-    setuptools normally uses, you can specify a module name and class name in
-    this argument.  The named class must be instantiable with no arguments, and
-    its instances must support the ``loadTestsFromNames()`` method as defined
-    in the Python ``unittest`` module's ``TestLoader`` class.  Setuptools will
-    pass only one test "name" in the `names` argument: the value supplied for
-    the ``test_suite`` argument.  The loader you specify may interpret this
-    string in any way it likes, as there are no restrictions on what may be
-    contained in a ``test_suite`` string.
-
-    The module name and class name must be separated by a ``:``.  The default
-    value of this argument is ``"setuptools.command.test:ScanningLoader"``.  If
-    you want to use the default ``unittest`` behavior, you can specify
-    ``"unittest:TestLoader"`` as your ``test_loader`` argument instead.  This
-    will prevent automatic scanning of submodules and subpackages.
-
-    The module and class you specify here may be contained in another package,
-    as long as you use the ``tests_require`` option to ensure that the package
-    containing the loader class is available when the ``test`` command is run.
-
-``eager_resources``
-    A list of strings naming resources that should be extracted together, if
-    any of them is needed, or if any C extensions included in the project are
-    imported.  This argument is only useful if the project will be installed as
-    a zipfile, and there is a need to have all of the listed resources be
-    extracted to the filesystem *as a unit*.  Resources listed here
-    should be '/'-separated paths, relative to the source root, so to list a
-    resource ``foo.png`` in package ``bar.baz``, you would include the string
-    ``bar/baz/foo.png`` in this argument.
-
-    If you only need to obtain resources one at a time, or you don't have any C
-    extensions that access other files in the project (such as data files or
-    shared libraries), you probably do NOT need this argument and shouldn't
-    mess with it.  For more details on how this argument works, see the section
-    below on `Automatic Resource Extraction`_.
-
-``use_2to3``
-    Convert the source code from Python 2 to Python 3 with 2to3 during the
-    build process. See :doc:`python3` for more details.
-
-``convert_2to3_doctests``
-    List of doctest source files that need to be converted with 2to3.
-    See :doc:`python3` for more details.
-
-``use_2to3_fixers``
-    A list of modules to search for additional fixers to be used during
-    the 2to3 conversion. See :doc:`python3` for more details.
-
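-To make the testing-related keywords above a bit more concrete, here is a
-minimal sketch (the project name, test package, and the ``mock`` pin are
-purely illustrative)::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        packages = find_packages(),
-        zip_safe = True,
-
-        # "setup.py test" will run this suite...
-        test_suite = "helloworld.tests",
-        # ...after downloading test-only dependencies to the setup directory:
-        tests_require = ["mock>=0.8"],
-        # Uncomment to use the stock unittest loader instead of the default
-        # scanning loader:
-        # test_loader = "unittest:TestLoader",
-    )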
-
-Using ``find_packages()``
--------------------------
-
-For simple projects, it's usually easy enough to manually add packages to
-the ``packages`` argument of ``setup()``.  However, for very large projects
-(Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the
-package list updated.  That's what ``setuptools.find_packages()`` is for.
-
-``find_packages()`` takes a source directory, and a list of package names or
-patterns to exclude.  If omitted, the source directory defaults to the same
-directory as the setup script.  Some projects use a ``src`` or ``lib``
-directory as the root of their source tree, and those projects would of course
-use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``.  (And
-such projects also need something like ``package_dir = {'':'src'}`` in their
-``setup()`` arguments, but that's just a normal distutils thing.)
-
-Anyway, ``find_packages()`` walks the target directory, and finds Python
-packages by looking for ``__init__.py`` files.  It then filters the list of
-packages using the exclusion patterns.
-
-Exclusion patterns are package names, optionally including wildcards.  For
-example, ``find_packages(exclude=["*.tests"])`` will exclude all packages whose
-last name part is ``tests``.   Or, ``find_packages(exclude=["*.tests",
-"*.tests.*"])`` will also exclude any subpackages of packages named ``tests``,
-but it still won't exclude a top-level ``tests`` package or the children
-thereof.  In fact, if you really want no ``tests`` packages at all, you'll need
-something like this::
-
-    find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
-
-in order to cover all the bases.  Really, the exclusion patterns are intended
-to cover simpler use cases than this, like excluding a single, specified
-package and its subpackages.
-
-Regardless of the target directory or exclusions, the ``find_packages()``
-function returns a list of package names suitable for use as the ``packages``
-argument to ``setup()``, and so is usually the easiest way to set that
-argument in your setup script.  Especially since it frees you from having to
-remember to modify your setup script whenever your project grows additional
-top-level packages or subpackages.
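-
-For instance, a minimal sketch for a project that keeps its code under a
-``src`` directory might look like this (the project name and exclusion
-patterns are only illustrative)::
-
-    from setuptools import setup, find_packages
-
-    setup(
-        name = "HelloWorld",
-        version = "0.1",
-        # tell distutils that packages live under src:
-        package_dir = {'': 'src'},
-        # scan src/ for packages, skipping anything test-related:
-        packages = find_packages(
-            'src',
-            exclude = ["*.tests", "*.tests.*", "tests.*", "tests"],
-        ),
-    )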
-
-
-Automatic Script Creation
-=========================
-
-Packaging and installing scripts can be a bit awkward with the distutils.  For
-one thing, there's no easy way to have a script's filename match local
-conventions on both Windows and POSIX platforms.  For another, you often have
-to create a separate file just for the "main" script, when your actual "main"
-is a function in a module somewhere.  And even in Python 2.4, using the ``-m``
-option only works for actual ``.py`` files that aren't installed in a package.
-
-``setuptools`` fixes all of these problems by automatically generating scripts
-for you with the correct extension, and on Windows it will even create an
-``.exe`` file so that users don't have to change their ``PATHEXT`` settings.
-The way to use this feature is to define "entry points" in your setup script
-that indicate what function the generated script should import and run.  For
-example, to create two console scripts called ``foo`` and ``bar``, and a GUI
-script called ``baz``, you might do something like this::
-
-    setup(
-        # other arguments here...
-        entry_points = {
-            'console_scripts': [
-                'foo = my_package.some_module:main_func',
-                'bar = other_module:some_func',
-            ],
-            'gui_scripts': [
-                'baz = my_package_gui:start_func',
-            ]
-        }
-    )
-
-When this project is installed on non-Windows platforms (using "setup.py
-install", "setup.py develop", or by using EasyInstall), a set of ``foo``,
-``bar``, and ``baz`` scripts will be installed that import ``main_func`` and
-``some_func`` from the specified modules.  The functions you specify are called
-with no arguments, and their return value is passed to ``sys.exit()``, so you
-can return an errorlevel or message to print to stderr.
-
-On Windows, a set of ``foo.exe``, ``bar.exe``, and ``baz.exe`` launchers are
-created, alongside a set of ``foo.py``, ``bar.py``, and ``baz.pyw`` files.  The
-``.exe`` wrappers find and execute the right version of Python to run the
-``.py`` or ``.pyw`` file.
-
-You may define as many "console script" and "gui script" entry points as you
-like, and each one can optionally specify "extras" that it depends on, that
-will be added to ``sys.path`` when the script is run.  For more information on
-"extras", see the section below on `Declaring Extras`_.  For more information
-on "entry points" in general, see the section below on `Dynamic Discovery of
-Services and Plugins`_.
-
-
-"Eggsecutable" Scripts
-----------------------
-
-Occasionally, there are situations where it's desirable to make an ``.egg``
-file directly executable.  You can do this by including an entry point such
-as the following::
-
-    setup(
-        # other arguments here...
-        entry_points = {
-            'setuptools.installation': [
-                'eggsecutable = my_package.some_module:main_func',
-            ]
-        }
-    )
-
-Any eggs built from the above setup script will include a short executable
-prelude that imports and calls ``main_func()`` from ``my_package.some_module``.
-The prelude can be run on Unix-like platforms (including Mac and Linux) by
-invoking the egg with ``/bin/sh``, or by enabling execute permissions on the
-``.egg`` file.  For the executable prelude to run, the appropriate version of
-Python must be available via the ``PATH`` environment variable, under its
-"long" name.  That is, if the egg is built for Python 2.3, there must be a
-``python2.3`` executable present in a directory on ``PATH``.
-
-This feature is primarily intended to support using ``distribute_setup.py`` to
-install setuptools itself on non-Windows platforms, but may be useful for
-other projects as well.
-
-IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or
-invoked via symlinks.  They *must* be invoked using their original filename, in
-order to ensure that, once running, ``pkg_resources`` will know what project
-and version is in use.  The header script will check this and exit with an
-error if the ``.egg`` file has been renamed or is invoked via a symlink that
-changes its base name.
-
-
-Declaring Dependencies
-======================
-
-``setuptools`` supports automatically installing dependencies when a package is
-installed, and including information about dependencies in Python Eggs (so that
-package management tools like EasyInstall can use the information).
-
-``setuptools`` and ``pkg_resources`` use a common syntax for specifying a
-project's required dependencies.  This syntax consists of a project's PyPI
-name, optionally followed by a comma-separated list of "extras" in square
-brackets, optionally followed by a comma-separated list of version
-specifiers.  A version specifier is one of the operators ``<``, ``>``, ``<=``,
-``>=``, ``==`` or ``!=``, followed by a version identifier.  Tokens may be
-separated by whitespace, but any whitespace or nonstandard characters within a
-project name or version identifier must be replaced with ``-``.
-
-Version specifiers for a given project are internally sorted into ascending
-version order, and used to establish what ranges of versions are acceptable.
-Adjacent redundant conditions are also consolidated (e.g. ``">1, >2"`` becomes
-``">1"``, and ``"<2,<3"`` becomes ``"<3"``). ``"!="`` versions are excised from
-the ranges they fall within.  A project's version is then checked for
-membership in the resulting ranges. (Note that providing conflicting conditions
-for the same version (e.g. "<2,>=2" or "==2,!=2") is meaningless and may
-therefore produce bizarre results.)
-
-Here are some example requirement specifiers::
-
-    docutils >= 0.3
-
-    # comment lines and \ continuations are allowed in requirement strings
-    BazSpam ==1.1, ==1.2, ==1.3, ==1.4, ==1.5, \
-        ==1.6, ==1.7  # and so are line-end comments
-
-    PEAK[FastCGI, reST]>=0.5a4
-
-    setuptools==0.5a7
-
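-If you are unsure how a particular specifier will be interpreted, a quick way
-to experiment is with ``pkg_resources.Requirement``; the following minimal
-sketch uses a made-up project name, and the membership test accepts a plain
-version string::
-
-    >>> from pkg_resources import Requirement
-    >>> req = Requirement.parse("BazSpam>=1.1, !=1.3, <2.0")
-    >>> '1.2' in req    # inside the accepted range
-    True
-    >>> '1.3' in req    # explicitly excluded by !=1.3
-    False
-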
-The simplest way to include requirement specifiers is to use the
-``install_requires`` argument to ``setup()``.  It takes a string or list of
-strings containing requirement specifiers.  If you include more than one
-requirement in a string, each requirement must begin on a new line.
-
-This has three effects:
-
-1. When your project is installed, either by using EasyInstall, ``setup.py
-   install``, or ``setup.py develop``, all of the dependencies not already
-   installed will be located (via PyPI), downloaded, built (if necessary),
-   and installed.
-
-2. Any scripts in your project will be installed with wrappers that verify
-   the availability of the specified dependencies at runtime, and ensure that
-   the correct versions are added to ``sys.path`` (e.g. if multiple versions
-   have been installed).
-
-3. Python Egg distributions will include a metadata file listing the
-   dependencies.
-
-Note, by the way, that if you declare your dependencies in ``setup.py``, you do
-*not* need to use the ``require()`` function in your scripts or modules, as
-long as you either install the project or use ``setup.py develop`` to do
-development work on it.  (See `"Development Mode"`_ below for more details on
-using ``setup.py develop``.)
-
-
-Dependencies that aren't in PyPI
---------------------------------
-
-If your project depends on packages that aren't registered in PyPI, you may
-still be able to depend on them, as long as they are available for download
-as:
-
-- an egg,
-- a source distribution in the standard distutils ``sdist`` format,
-- a single ``.py`` file, or
-- a VCS repository (Subversion, Mercurial, or Git).
-
-You just need to add some URLs to the ``dependency_links`` argument to
-``setup()``.
-
-The URLs must be either:
-
-1. direct download URLs,
-2. the URLs of web pages that contain direct download links, or
-3. the repository's URL
-
-In general, it's better to link to web pages, because it is usually less
-complex to update a web page than to release a new version of your project.
-You can also use a SourceForge ``showfiles.php`` link in the case where a
-package you depend on is distributed via SourceForge.
-
-If you depend on a package that's distributed as a single ``.py`` file, you
-must include an ``"#egg=project-version"`` suffix to the URL, to give a project
-name and version number.  (Be sure to escape any dashes in the name or version
-by replacing them with underscores.)  EasyInstall will recognize this suffix
-and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
-as an egg.
-
-In the case of a VCS checkout, you should also append ``#egg=project-version``
-in order to identify which package that checkout should be used for. You can
-append ``@REV`` to the URL's path (before the fragment) to specify a revision.
-Additionally, you can force which VCS is used by prepending the URL with
-a certain prefix. Currently available are:
-
--  ``svn+URL`` for Subversion,
--  ``git+URL`` for Git, and
--  ``hg+URL`` for Mercurial
-
-A more complete example would be:
-
-    ``vcs+proto://host/path@revision#egg=project-version``
-
-Be careful with the version. It should match the one inside the project files.
-If you want to disregard the version, you have to omit it both in the
-requirement and in the URL's fragment.
-
-This will do a checkout (or a clone, in Git and Mercurial parlance) to a
-temporary folder and run ``setup.py bdist_egg``.
-
-The ``dependency_links`` option takes the form of a list of URL strings.  For
-example, the below will cause EasyInstall to search the specified page for
-eggs or source distributions, if the package's dependencies aren't already
-installed::
-
-    setup(
-        ...
-        dependency_links = [
-            "http://peak.telecommunity.com/snapshots/"
-        ],
-    )
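-
-As a further sketch (every project name and URL below is a placeholder),
-dependencies served as a single ``.py`` file or as a VCS checkout would carry
-the ``#egg=`` suffix described above::
-
-    setup(
-        ...
-        install_requires = ["SinglePyModule==1.0", "SomeVCSPackage==2.3"],
-        dependency_links = [
-            # a bare .py file, wrapped as an egg thanks to the #egg= suffix:
-            "http://example.com/single_py_module.py#egg=SinglePyModule-1.0",
-            # a Mercurial checkout, pinned to a revision before the fragment:
-            "hg+http://example.com/repos/SomeVCSPackage@v2.3#egg=SomeVCSPackage-2.3",
-        ],
-    )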
-
-
-.. _Declaring Extras:
-
-
-Declaring "Extras" (optional features with their own dependencies)
-------------------------------------------------------------------
-
-Sometimes a project has "recommended" dependencies that are not required for
-all uses of the project.  For example, a project might offer optional PDF
-output if ReportLab is installed, and reStructuredText support if docutils is
-installed.  These optional features are called "extras", and setuptools allows
-you to define their requirements as well.  In this way, other projects that
-require these optional features can force the additional requirements to be
-installed, by naming the desired extras in their ``install_requires``.
-
-For example, let's say that Project A offers optional PDF and reST support::
-
-    setup(
-        name="Project-A",
-        ...
-        extras_require = {
-            'PDF':  ["ReportLab>=1.2", "RXP"],
-            'reST': ["docutils>=0.3"],
-        }
-    )
-
-As you can see, the ``extras_require`` argument takes a dictionary mapping
-names of "extra" features, to strings or lists of strings describing those
-features' requirements.  These requirements will *not* be automatically
-installed unless another package depends on them (directly or indirectly) by
-including the desired "extras" in square brackets after the associated project
-name.  (Or if the extras were listed in a requirement spec on the EasyInstall
-command line.)
-
-Extras can be used by a project's `entry points`_ to specify dynamic
-dependencies.  For example, if Project A includes a "rst2pdf" script, it might
-declare it like this, so that the "PDF" requirements are only resolved if the
-"rst2pdf" script is run::
-
-    setup(
-        name="Project-A",
-        ...
-        entry_points = {
-            'console_scripts': [
-                'rst2pdf = project_a.tools.pdfgen [PDF]',
-                'rst2html = project_a.tools.htmlgen',
-                # more script entry points ...
-            ],
-        }
-    )
-
-Projects can also use another project's extras when specifying dependencies.
-For example, if project B needs "project A" with PDF support installed, it
-might declare the dependency like this::
-
-    setup(
-        name="Project-B",
-        install_requires = ["Project-A[PDF]"],
-        ...
-    )
-
-This will cause ReportLab to be installed along with project A, if project B is
-installed -- even if project A was already installed.  In this way, a project
-can encapsulate groups of optional "downstream dependencies" under a feature
-name, so that packages that depend on it don't have to know what the downstream
-dependencies are.  If a later version of Project A builds in PDF support and
-no longer needs ReportLab, or if it ends up needing other dependencies besides
-ReportLab in order to provide PDF support, Project B's setup information does
-not need to change, but the right packages will still be installed if needed.
-
-Note, by the way, that if a project ends up not needing any other packages to
-support a feature, it should keep an empty requirements list for that feature
-in its ``extras_require`` argument, so that packages depending on that feature
-don't break (due to an invalid feature name).  For example, if Project A above
-builds in PDF support and no longer needs ReportLab, it could change its
-setup to this::
-
-    setup(
-        name="Project-A",
-        ...
-        extras_require = {
-            'PDF':  [],
-            'reST': ["docutils>=0.3"],
-        }
-    )
-
-so that Project B doesn't have to remove the ``[PDF]`` from its requirement
-specifier.
-
-
-Including Data Files
-====================
-
-The distutils have traditionally allowed installation of "data files", which
-are placed in a platform-specific location.  However, the most common use case
-for data files distributed with a package is for use *by* the package, usually
-by including the data files in the package directory.
-
-Setuptools offers three ways to specify data files to be included in your
-packages.  First, you can simply use the ``include_package_data`` keyword,
-e.g.::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        include_package_data = True
-    )
-
-This tells setuptools to install any data files it finds in your packages.
-The data files must be under CVS or Subversion control, or else they must be
-specified via the distutils' ``MANIFEST.in`` file.  (They can also be tracked
-by another revision control system, using an appropriate plugin.  See the
-section below on `Adding Support for Other Revision Control Systems`_ for
-information on how to write such plugins.)
-
-If the data files are not under version control, or are not in a supported
-version control system, or if you want finer-grained control over what files
-are included (for example, if you have documentation files in your package
-directories and want to exclude them from installation), then you can also use
-the ``package_data`` keyword, e.g.::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        package_data = {
-            # If any package contains *.txt or *.rst files, include them:
-            '': ['*.txt', '*.rst'],
-            # And include any *.msg files found in the 'hello' package, too:
-            'hello': ['*.msg'],
-        }
-    )
-
-The ``package_data`` argument is a dictionary that maps from package names to
-lists of glob patterns.  The globs may include subdirectory names, if the data
-files are contained in a subdirectory of the package.  For example, if the
-package tree looks like this::
-
-    setup.py
-    src/
-        mypkg/
-            __init__.py
-            mypkg.txt
-            data/
-                somefile.dat
-                otherdata.dat
-
-The setuptools setup file might look like this::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages = find_packages('src'),  # include all packages under src
-        package_dir = {'':'src'},   # tell distutils packages are under src
-
-        package_data = {
-            # If any package contains *.txt files, include them:
-            '': ['*.txt'],
-            # And include any *.dat files found in the 'data' subdirectory
-            # of the 'mypkg' package, also:
-            'mypkg': ['data/*.dat'],
-        }
-    )
-
-Notice that if you list patterns in ``package_data`` under the empty string,
-these patterns are used to find files in every package, even ones that also
-have their own patterns listed.  Thus, in the above example, the ``mypkg.txt``
-file gets included even though it's not listed in the patterns for ``mypkg``.
-
-Also notice that if you use paths, you *must* use a forward slash (``/``) as
-the path separator, even if you are on Windows.  Setuptools automatically
-converts slashes to appropriate platform-specific separators at build time.
-
-(Note: although the ``package_data`` argument was previously only available in
-``setuptools``, it was also added to the Python ``distutils`` package as of
-Python 2.4; there is `some documentation for the feature`__ available on the
-python.org website.  If using the setuptools-specific ``include_package_data``
-argument, files specified by ``package_data`` will *not* be automatically
-added to the manifest unless they are tracked by a supported version control
-system, or are listed in the MANIFEST.in file.)
-
-__ http://docs.python.org/dist/node11.html
-
-Sometimes, the ``include_package_data`` or ``package_data`` options alone
-aren't sufficient to precisely define what files you want included.  For
-example, you may want to include package README files in your revision control
-system and source distributions, but exclude them from being installed.  So,
-setuptools offers an ``exclude_package_data`` option as well, that allows you
-to do things like this::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        packages = find_packages('src'),  # include all packages under src
-        package_dir = {'':'src'},   # tell distutils packages are under src
-
-        include_package_data = True,    # include everything in source control
-
-        # ...but exclude README.txt from all packages
-        exclude_package_data = { '': ['README.txt'] },
-    )
-
-The ``exclude_package_data`` option is a dictionary mapping package names to
-lists of wildcard patterns, just like the ``package_data`` option.  And, just
-as with that option, a key of ``''`` will apply the given pattern(s) to all
-packages.  However, any files that match these patterns will be *excluded*
-from installation, even if they were listed in ``package_data`` or were
-included as a result of using ``include_package_data``.
-
-In summary, the three options allow you to:
-
-``include_package_data``
-    Accept all data files and directories matched by ``MANIFEST.in`` or found
-    in source control.
-
-``package_data``
-    Specify additional patterns to match files and directories that may or may
-    not be matched by ``MANIFEST.in`` or found in source control.
-
-``exclude_package_data``
-    Specify patterns for data files and directories that should *not* be
-    included when a package is installed, even if they would otherwise have
-    been included due to the use of the preceding options.
-
-NOTE: Due to the way the distutils build process works, a data file that you
-include in your project and then stop including may be "orphaned" in your
-project's build directories, requiring you to run ``setup.py clean --all`` to
-fully remove them.  This may also be important for your users and contributors
-if they track intermediate revisions of your project using Subversion; be sure
-to let them know when you make changes that remove files from inclusion so they
-can run ``setup.py clean --all``.
-
-
-Accessing Data Files at Runtime
--------------------------------
-
-Typically, existing programs manipulate a package's ``__file__`` attribute in
-order to find the location of data files.  However, this manipulation isn't
-compatible with PEP 302-based import hooks, including importing from zip files
-and Python Eggs.  It is strongly recommended that, if you are using data files,
-you should use the `Resource Management API`_ of ``pkg_resources`` to access
-them.  The ``pkg_resources`` module is distributed as part of setuptools, so if
-you're using setuptools to distribute your package, there is no reason not to
-use its resource management API.  See also `Accessing Package Resources`_ for
-a quick example of converting code that uses ``__file__`` to use
-``pkg_resources`` instead.
-
-.. _Resource Management API: http://peak.telecommunity.com/DevCenter/PythonEggs#resource-management
-.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources
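-
-As a minimal sketch (the file name here is hypothetical), code that formerly
-computed paths relative to ``__file__`` can usually be rewritten like this::
-
-    from pkg_resources import resource_string, resource_filename
-
-    # Read the raw contents of a data file bundled inside this package;
-    # this works even when the package is imported from a zipped egg
-    defaults = resource_string(__name__, 'defaults.cfg')
-
-    # If an API insists on a real filesystem path, ask pkg_resources for
-    # one; the file is extracted to a cache directory if necessary
-    defaults_path = resource_filename(__name__, 'defaults.cfg')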
-
-
-Non-Package Data Files
-----------------------
-
-The ``distutils`` normally install general "data files" to a platform-specific
-location (e.g. ``/usr/share``).  This feature is intended to be used for things
-like documentation, example configuration files, and the like.  ``setuptools``
-does not install these data files in a separate location, however.  They are
-bundled inside the egg file or directory, alongside the Python modules and
-packages.  The data files can also be accessed using the `Resource Management
-API`_, by specifying a ``Requirement`` instead of a package name::
-
-    from pkg_resources import Requirement, resource_filename
-    filename = resource_filename(Requirement.parse("MyProject"),"sample.conf")
-
-The above code will obtain the filename of the "sample.conf" file in the data
-root of the "MyProject" distribution.
-
-Note, by the way, that this encapsulation of data files means that you can't
-actually install data files to some arbitrary location on a user's machine;
-this is a feature, not a bug.  You can always include a script in your
-distribution that extracts and copies your documentation or data files to
-a user-specified location, at their discretion.  If you put related data files
-in a single directory, you can use ``resource_filename()`` with the directory
-name to get a filesystem directory that then can be copied with the ``shutil``
-module.  (Even if your package is installed as a zipfile, calling
-``resource_filename()`` on a directory will return an actual filesystem
-directory, whose contents will be that entire subtree of your distribution.)
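-
-A hedged sketch of such a copying step (the project name, directory name, and
-destination are all hypothetical)::
-
-    import shutil
-    from pkg_resources import Requirement, resource_filename
-
-    # Locate the bundled "docs" directory of the MyProject distribution;
-    # if the project is zipped, the whole subtree is extracted to a cache
-    source = resource_filename(Requirement.parse("MyProject"), "docs")
-
-    # Copy it to a user-specified location (which must not already exist)
-    shutil.copytree(source, "/home/user/myproject-docs")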
-
-(Of course, if you're writing a new package, you can just as easily place your
-data files or directories inside one of your packages, rather than using the
-distutils' approach.  However, if you're updating an existing application, it
-may be simpler not to change the way it currently specifies these data files.)
-
-
-Automatic Resource Extraction
------------------------------
-
-If you are using tools that expect your resources to be "real" files, or your
-project includes non-extension native libraries or other files that your C
-extensions expect to be able to access, you may need to list those files in
-the ``eager_resources`` argument to ``setup()``, so that the files will be
-extracted together, whenever a C extension in the project is imported.
-
-This is especially important if your project includes shared libraries *other*
-than distutils-built C extensions, and those shared libraries use file
-extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
-extensions that setuptools 0.6a8 and higher automatically detects as shared
-libraries and adds to the ``native_libs.txt`` file for you.  Any shared
-libraries whose names do not end with one of those extensions should be listed
-as ``eager_resources``, because they need to be present in the filesystem when
-the C extensions that link to them are used.
-
-The ``pkg_resources`` runtime for compressed packages will automatically
-extract *all* C extensions and ``eager_resources`` at the same time, whenever
-*any* C extension or eager resource is requested via the ``resource_filename()``
-API.  (C extensions are imported using ``resource_filename()`` internally.)
-This ensures that C extensions will see all of the "real" files that they
-expect to see.
-
-Note also that you can list directory resource names in ``eager_resources`` as
-well, in which case the directory's contents (including subdirectories) will be
-extracted whenever any C extension or eager resource is requested.
-
-Please note that if you're not sure whether you need to use this argument, you
-don't!  It's really intended to support projects with lots of non-Python
-dependencies and as a last resort for crufty projects that can't otherwise
-handle being compressed.  If your package is pure Python, Python plus data
-files, or Python plus C, you really don't need this.  You've got to be using
-either C or an external program that needs "real" files in your project before
-there's any possibility of ``eager_resources`` being relevant to your project.
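-
-If you do need it, the declaration itself is simple.  A minimal sketch (the
-resource names are hypothetical)::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        # These bundled shared libraries don't end in .dll/.so/.dylib, so
-        # list them here to have them extracted alongside the C extensions:
-        eager_resources = ['mypkg/libsupport.sl', 'mypkg/auxdata.bin'],
-    )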
-
-
-Extensible Applications and Frameworks
-======================================
-
-
-.. _Entry Points:
-
-Dynamic Discovery of Services and Plugins
------------------------------------------
-
-``setuptools`` supports creating libraries that "plug in" to extensible
-applications and frameworks, by letting you register "entry points" in your
-project that can be imported by the application or framework.
-
-For example, suppose that a blogging tool wants to support plugins
-that provide translation for various file types to the blog's output format.
-The framework might define an "entry point group" called ``blogtool.parsers``,
-and then allow plugins to register entry points for the file extensions they
-support.
-
-This would allow people to create distributions that contain one or more
-parsers for different file types, and then the blogging tool would be able to
-find the parsers at runtime by looking up an entry point for the file
-extension (or mime type, or however it wants to).
-
-Note that if the blogging tool includes parsers for certain file formats, it
-can register these as entry points in its own setup script, which means it
-doesn't have to special-case its built-in formats.  They can just be treated
-the same as any other plugin's entry points would be.
-
-If you're creating a project that plugs in to an existing application or
-framework, you'll need to know what entry points or entry point groups are
-defined by that application or framework.  Then, you can register entry points
-in your setup script.  Here are a few examples of ways you might register an
-``.rst`` file parser entry point in the ``blogtool.parsers`` entry point group,
-for our hypothetical blogging tool::
-
-    setup(
-        # ...
-        entry_points = {'blogtool.parsers': '.rst = some_module:SomeClass'}
-    )
-
-    setup(
-        # ...
-        entry_points = {'blogtool.parsers': ['.rst = some_module:a_func']}
-    )
-
-    setup(
-        # ...
-        entry_points = """
-            [blogtool.parsers]
-            .rst = some.nested.module:SomeClass.some_classmethod [reST]
-        """,
-        extras_require = dict(reST = "Docutils>=0.3.5")
-    )
-
-The ``entry_points`` argument to ``setup()`` accepts either a string with
-``.ini``-style sections, or a dictionary mapping entry point group names to
-either strings or lists of strings containing entry point specifiers.  An
-entry point specifier consists of a name and value, separated by an ``=``
-sign.  The value consists of a dotted module name, optionally followed by a
-``:`` and a dotted identifier naming an object within the module.  It can
-also include a bracketed list of "extras" that are required for the entry
-point to be used.  When the invoking application or framework requests loading
-of an entry point, any requirements implied by the associated extras will be
-passed to ``pkg_resources.require()``, so that an appropriate error message
-can be displayed if the needed package(s) are missing.  (Of course, the
-invoking app or framework can ignore such errors if it wants to make an entry
-point optional if a requirement isn't installed.)
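-
-On the consuming side, the invoking application uses the ``pkg_resources``
-entry point API.  A minimal sketch of how our hypothetical blogging tool
-might look up a parser::
-
-    from pkg_resources import iter_entry_points
-
-    def find_parser(extension):
-        # Scan every installed distribution advertising an entry point in
-        # the "blogtool.parsers" group under the requested name
-        for entry_point in iter_entry_points('blogtool.parsers', extension):
-            # load() imports the module, resolves the named object, and
-            # requires any [extras] the plugin declared
-            return entry_point.load()
-        raise LookupError("no parser registered for %r" % extension)
-
-    parser = find_parser('.rst')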
-
-
-Defining Additional Metadata
-----------------------------
-
-Some extensible applications and frameworks may need to define their own kinds
-of metadata to include in eggs, which they can then access using the
-``pkg_resources`` metadata APIs.  Ordinarily, this is done by having plugin
-developers include additional files in their ``ProjectName.egg-info``
-directory.  However, since it can be tedious to create such files by hand, you
-may want to create a distutils extension that will create the necessary files
-from arguments to ``setup()``, in much the same way that ``setuptools`` does
-for many of the ``setup()`` arguments it adds.  See the section below on
-`Creating distutils Extensions`_ for more details, especially the subsection on
-`Adding new EGG-INFO Files`_.
-
-
-"Development Mode"
-==================
-
-Under normal circumstances, the ``distutils`` assume that you are going to
-build a distribution of your project, not use it in its "raw" or "unbuilt"
-form.  If you were to use the ``distutils`` that way, you would have to rebuild
-and reinstall your project every time you made a change to it during
-development.
-
-Another problem that sometimes comes up with the ``distutils`` is that you may
-need to do development on two related projects at the same time.  You may need
-to put both projects' packages in the same directory to run them, but need to
-keep them separate for revision control purposes.  How can you do this?
-
-Setuptools allows you to deploy your projects for use in a common directory or
-staging area, but without copying any files.  Thus, you can edit each project's
-code in its checkout directory, and only need to run build commands when you
-change a project's C extensions or similarly compiled files.  You can even
-deploy a project into another project's checkout directory, if that's your
-preferred way of working (as opposed to using a common independent staging area
-or the site-packages directory).
-
-To do this, use the ``setup.py develop`` command.  It works very similarly to
-``setup.py install`` or the EasyInstall tool, except that it doesn't actually
-install anything.  Instead, it creates a special ``.egg-link`` file in the
-deployment directory, that links to your project's source code.  And, if your
-deployment directory is Python's ``site-packages`` directory, it will also
-update the ``easy-install.pth`` file to include your project's source code,
-thereby making it available on ``sys.path`` for all programs using that Python
-installation.
-
-If you have enabled the ``use_2to3`` flag, then of course the ``.egg-link``
-will not link directly to your source code when run under Python 3, since
-that source code would be made for Python 2 and not work under Python 3.
-Instead the ``setup.py develop`` will build Python 3 code under the ``build``
-directory, and link there. This means that after doing code changes you will
-have to run ``setup.py build`` before these changes are picked up by your
-Python 3 installation.
-
-In addition, the ``develop`` command creates wrapper scripts in the target
-script directory that will run your in-development scripts after ensuring that
-all your ``install_requires`` packages are available on ``sys.path``.
-
-You can deploy the same project to multiple staging areas, e.g. if you have
-multiple projects on the same machine that are sharing the same project you're
-doing development work on.
-
-When you're done with a given development task, you can remove the project
-source from a staging area using ``setup.py develop --uninstall``, specifying
-the desired staging area if it's not the default.
-
-There are several options to control the precise behavior of the ``develop``
-command; see the section on the `develop`_ command below for more details.
-
-Note that you can also apply setuptools commands to non-setuptools projects,
-using commands like this::
-
-   python -c "import setuptools; execfile('setup.py')" develop
-
-That is, you can simply list the normal setup commands and options following
-the quoted part.
-
-
-Distributing a ``setuptools``-based project
-===========================================
-
-Using ``setuptools``...  Without bundling it!
----------------------------------------------
-
-Your users might not have ``setuptools`` installed on their machines, or even
-if they do, it might not be the right version.  Fixing this is easy; just
-download `distribute_setup.py`_, and put it in the same directory as your ``setup.py``
-script.  (Be sure to add it to your revision control system, too.)  Then add
-these two lines to the very top of your setup script, before the script imports
-anything from setuptools:
-
-.. code-block:: python
-
-    import distribute_setup
-    distribute_setup.use_setuptools()
-
-That's it.  The ``distribute_setup`` module will automatically download a matching
-version of ``setuptools`` from PyPI, if it isn't present on the target system.
-Whenever you install an updated version of setuptools, you should also update
-your projects' ``distribute_setup.py`` files, so that a matching version gets installed
-on the target machine(s).
-
-By the way, setuptools supports the new PyPI "upload" command, so you can use
-``setup.py sdist upload`` or ``setup.py bdist_egg upload`` to upload your
-source or egg distributions respectively.  Your project's current version must
-be registered with PyPI first, of course; you can use ``setup.py register`` to
-do that.  Or you can do it all in one step, e.g. ``setup.py register sdist
-bdist_egg upload`` will register the package, build source and egg
-distributions, and then upload them both to PyPI, where they'll be easily
-found by other projects that depend on them.
-
-(By the way, if you need to distribute a specific version of ``setuptools``,
-you can specify the exact version and base download URL as parameters to the
-``use_setuptools()`` function.  See the function's docstring for details.)
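-
-A hedged sketch, assuming the keyword names ``version`` and ``download_base``
-(check the docstring in your copy of ``distribute_setup.py``, since the exact
-parameters may differ):
-
-.. code-block:: python
-
-    import distribute_setup
-    # Pin a particular release and fetch it from an internal mirror
-    distribute_setup.use_setuptools(
-        version="0.6.34",
-        download_base="http://pypi.example.com/packages/",
-    )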
-
-
-What Your Users Should Know
----------------------------
-
-In general, a setuptools-based project looks just like any distutils-based
-project -- as long as your users have an internet connection and are installing
-to ``site-packages``, that is.  But for some users, these conditions don't
-apply, and they may become frustrated if this is their first encounter with
-a setuptools-based project.  To keep these users happy, you should review the
-following topics in your project's installation instructions, if they are
-relevant to your project and your target audience isn't already familiar with
-setuptools and ``easy_install``.
-
-Network Access
-    If your project is using ``distribute_setup``, you should inform users of the
-    need to either have network access, or to preinstall the correct version of
-    setuptools using the `EasyInstall installation instructions`_.  Those
-    instructions also have tips for dealing with firewalls as well as how to
-    manually download and install setuptools.
-
-Custom Installation Locations
-    You should inform your users that if they are installing your project to
-    somewhere other than the main ``site-packages`` directory, they should
-    first install setuptools using the instructions for `Custom Installation
-    Locations`_, before installing your project.
-
-Your Project's Dependencies
-    If your project depends on other projects that may need to be downloaded
-    from PyPI or elsewhere, you should list them in your installation
-    instructions, or tell users how to find out what they are.  While most
-    users will not need this information, any users who don't have unrestricted
-    internet access may have to find, download, and install the other projects
-    manually.  (Note, however, that they must still install those projects
-    using ``easy_install``, or your project will not know they are installed,
-    and your setup script will try to download them again.)
-
-    If you want to be especially friendly to users with limited network access,
-    you may wish to build eggs for your project and its dependencies, making
-    them all available for download from your site, or at least create a page
-    with links to all of the needed eggs.  In this way, users with limited
-    network access can manually download all the eggs to a single directory,
-    then use the ``-f`` option of ``easy_install`` to specify the directory
-    to find eggs in.  Users who have full network access can just use ``-f``
-    with the URL of your download page, and ``easy_install`` will find all the
-    needed eggs using your links directly.  This is also useful when your
-    target audience isn't able to compile packages (e.g. most Windows users)
-    and your package or some of its dependencies include C code.
-
-Subversion or CVS Users and Co-Developers
-    Users and co-developers who are tracking your in-development code using
-    CVS, Subversion, or some other revision control system should probably read
-    this manual's sections regarding such development.  Alternately, you may
-    wish to create a quick-reference guide containing the tips from this manual
-    that apply to your particular situation.  For example, if you recommend
-    that people use ``setup.py develop`` when tracking your in-development
-    code, you should let them know that this needs to be run after every update
-    or commit.
-
-    Similarly, if you remove modules or data files from your project, you
-    should remind them to run ``setup.py clean --all`` and delete any obsolete
-    ``.pyc`` or ``.pyo`` files.  (This tip applies to the distutils in general, not
-    just setuptools, but not everybody knows about them; be kind to your users
-    by spelling out your project's best practices rather than leaving them
-    guessing.)
-
-Creating System Packages
-    Some users want to manage all Python packages using a single package
-    manager, and sometimes that package manager isn't ``easy_install``!
-    Setuptools currently supports ``bdist_rpm``, ``bdist_wininst``, and
-    ``bdist_dumb`` formats for system packaging.  If a user has a locally-
-    installed "bdist" packaging tool that internally uses the distutils
-    ``install`` command, it should be able to work with ``setuptools``.  Some
-    examples of "bdist" formats that this should work with include the
-    ``bdist_nsi`` and ``bdist_msi`` formats for Windows.
-
-    However, packaging tools that build binary distributions by running
-    ``setup.py install`` on the command line or as a subprocess will require
-    modification to work with setuptools.  They should use the
-    ``--single-version-externally-managed`` option to the ``install`` command,
-    combined with the standard ``--root`` or ``--record`` options.
-    See the `install command`_ documentation below for more details.  The
-    ``bdist_deb`` command is an example of a command that currently requires
-    this kind of patching to work with setuptools.
-
-    If you or your users have a problem building a usable system package for
-    your project, please report the problem via the mailing list so that
-    either the "bdist" tool in question or setuptools can be modified to
-    resolve the issue.
-
-
-
-Managing Multiple Projects
---------------------------
-
-If you're managing several projects that need to use ``distribute_setup``, and you
-are using Subversion as your revision control system, you can use the
-"svn:externals" property to share a single copy of ``distribute_setup`` between
-projects, so that it will always be up-to-date whenever you check out or update
-an individual project, without having to manually update each project to use
-a new version.
-
-However, because Subversion only supports using directories as externals, you
-have to turn ``distribute_setup.py`` into ``distribute_setup/__init__.py`` in order
-to do this, then create "externals" definitions that map the ``distribute_setup``
-directory into each project.  Also, if any of your projects use
-``find_packages()`` on their setup directory, you will need to exclude the
-resulting ``distribute_setup`` package, to keep it from being included in your
-distributions, e.g.::
-
-    setup(
-        ...
-        packages = find_packages(exclude=['distribute_setup']),
-    )
-
-Of course, the ``distribute_setup`` package will still be included in your
-packages' source distributions, as it needs to be.
-
-For your convenience, you may use the following external definition, which will
-track the latest version of setuptools::
-
-    ez_setup svn://svn.eby-sarna.com/svnroot/ez_setup
-
-You can set this by executing this command in your project directory::
-
-    svn propedit svn:externals .
-
-And then adding the line shown above to the file that comes up for editing.
-
-
-Setting the ``zip_safe`` flag
------------------------------
-
-For maximum performance, Python packages are best installed as zip files.
-Not all packages, however, are capable of running in compressed form, because
-they may expect to be able to access either source code or data files as
-normal operating system files.  So, ``setuptools`` can install your project
-as a zipfile or a directory, and its default choice is determined by the
-project's ``zip_safe`` flag.
-
-You can pass a True or False value for the ``zip_safe`` argument to the
-``setup()`` function, or you can omit it.  If you omit it, the ``bdist_egg``
-command will analyze your project's contents to see if it can detect any
-conditions that would prevent it from working in a zipfile.  It will output
-notices to the console about any such conditions that it finds.
-
-Currently, this analysis is extremely conservative: it will consider the
-project unsafe if it contains any C extensions or datafiles whatsoever.  This
-does *not* mean that the project can't or won't work as a zipfile!  It just
-means that the ``bdist_egg`` authors aren't yet comfortable asserting that
-the project *will* work.  If the project contains no C or data files, and does
-no ``__file__`` or ``__path__`` introspection or source code manipulation, then
-there is an extremely solid chance the project will work when installed as a
-zipfile.  (And if the project uses ``pkg_resources`` for all its data file
-access, then C extensions and other data files shouldn't be a problem at all.
-See the `Accessing Data Files at Runtime`_ section above for more information.)
-
-However, if ``bdist_egg`` can't be *sure* that your package will work, but
-you've checked over all the warnings it issued, and you are either satisfied it
-*will* work (or if you want to try it for yourself), then you should set
-``zip_safe`` to ``True`` in your ``setup()`` call.  If it turns out that it
-doesn't work, you can always change it to ``False``, which will force
-``setuptools`` to install your project as a directory rather than as a zipfile.
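-
-Either way, the declaration itself is a one-liner in ``setup()``; a minimal
-sketch::
-
-    from setuptools import setup, find_packages
-    setup(
-        ...
-        # We've verified the project runs correctly from a zipped egg
-        zip_safe = True,
-    )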
-
-Of course, the end-user can still override either decision, if they are using
-EasyInstall to install your package.  And, if you want to override for testing
-purposes, you can just run ``setup.py easy_install --zip-ok .`` or ``setup.py
-easy_install --always-unzip .`` in your project directory to install the
-package as a zipfile or directory, respectively.
-
-In the future, as we gain more experience with different packages and become
-more satisfied with the robustness of the ``pkg_resources`` runtime, the
-"zip safety" analysis may become less conservative.  However, we strongly
-recommend that you determine for yourself whether your project functions
-correctly when installed as a zipfile, correct any problems if you can, and
-then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
-flag, so that it will not be necessary for ``bdist_egg`` or ``EasyInstall`` to
-try to guess whether your project can work as a zipfile.
-
-
-Namespace Packages
-------------------
-
-Sometimes, a large package is more useful if distributed as a collection of
-smaller eggs.  However, Python does not normally allow the contents of a
-package to be retrieved from more than one location.  "Namespace packages"
-are a solution for this problem.  When you declare a package to be a namespace
-package, it means that the package has no meaningful contents in its
-``__init__.py``, and that it is merely a container for modules and subpackages.
-
-The ``pkg_resources`` runtime will then automatically ensure that the contents
-of namespace packages that are spread over multiple eggs or directories are
-combined into a single "virtual" package.
-
-The ``namespace_packages`` argument to ``setup()`` lets you declare your
-project's namespace packages, so that they will be included in your project's
-metadata.  The argument should list the namespace packages that the egg
-participates in.  For example, the ZopeInterface project might do this::
-
-    setup(
-        # ...
-        namespace_packages = ['zope']
-    )
-
-because it contains a ``zope.interface`` package that lives in the ``zope``
-namespace package.  Similarly, a project for a standalone ``zope.publisher``
-would also declare the ``zope`` namespace package.  When these projects are
-installed and used, Python will see them both as part of a "virtual" ``zope``
-package, even though they will be installed in different locations.
-
-Namespace packages don't have to be top-level packages.  For example, Zope 3's
-``zope.app`` package is a namespace package, and in the future PEAK's
-``peak.util`` package will be too.
-
-Note, by the way, that your project's source tree must include the namespace
-packages' ``__init__.py`` files (and the ``__init__.py`` of any parent
-packages), in a normal Python package layout.  These ``__init__.py`` files
-*must* contain the line::
-
-    __import__('pkg_resources').declare_namespace(__name__)
-
-This code ensures that the namespace package machinery is operating and that
-the current package is registered as a namespace package.
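-
-For example, a hypothetical source tree for a project that provides only
-``zope.interface`` might look like this::
-
-    setup.py
-    zope/
-        __init__.py         # contains only the declare_namespace() line
-        interface/
-            __init__.py     # ordinary package code goes here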
-
-You must NOT include any other code and data in a namespace package's
-``__init__.py``.  Even though it may appear to work during development, or when
-projects are installed as ``.egg`` files, it will not work when the projects
-are installed using "system" packaging tools -- in such cases the
-``__init__.py`` files will not be installed, let alone executed.
-
-You must include the ``declare_namespace()``  line in the ``__init__.py`` of
-*every* project that has contents for the namespace package in question, in
-order to ensure that the namespace will be declared regardless of which
-project's copy of ``__init__.py`` is loaded first.  If the first loaded
-``__init__.py`` doesn't declare it, it will never *be* declared, because no
-other copies will ever be loaded!
-
-
-TRANSITIONAL NOTE
-~~~~~~~~~~~~~~~~~
-
-Setuptools 0.6a automatically calls ``declare_namespace()`` for you at runtime,
-but the 0.7a versions will *not*.  This is because the automatic declaration
-feature has some negative side effects, such as needing to import all namespace
-packages during the initialization of the ``pkg_resources`` runtime, and also
-the need for ``pkg_resources`` to be explicitly imported before any namespace
-packages work at all.  Beginning with the 0.7a releases, you'll be responsible
-for including your own declaration lines, and the automatic declaration feature
-will be dropped to get rid of the negative side effects.
-
-During the remainder of the 0.6 development cycle, therefore, setuptools will
-warn you about missing ``declare_namespace()`` calls in your ``__init__.py``
-files, and you should correct these as soon as possible before setuptools 0.7a1
-is released.  Namespace packages without declaration lines will not work
-correctly once a user has upgraded to setuptools 0.7a1, so it's important that
-you make this change now in order to avoid having your code break in the field.
-Our apologies for the inconvenience, and thank you for your patience.
-
-
-
-Tagging and "Daily Build" or "Snapshot" Releases
-------------------------------------------------
-
-When a set of related projects are under development, it may be important to
-track finer-grained version increments than you would normally use for e.g.
-"stable" releases.  While stable releases might be measured in dotted numbers
-with alpha/beta/etc. status codes, development versions of a project often
-need to be tracked by revision or build number or even build date.  This is
-especially true when projects in development need to refer to one another, and
-therefore may literally need an up-to-the-minute version of something!
-
-To support these scenarios, ``setuptools`` allows you to "tag" your source and
-egg distributions by adding one or more of the following to the project's
-"official" version identifier:
-
-* A manually-specified pre-release tag, such as "build" or "dev", or a
-  manually-specified post-release tag, such as a build or revision number
-  (``--tag-build=STRING, -bSTRING``)
-
-* A "last-modified revision number" string generated automatically from
-  Subversion's metadata (assuming your project is being built from a Subversion
-  "working copy")  (``--tag-svn-revision, -r``)
-
-* An 8-character representation of the build date (``--tag-date, -d``), as
-  a post-release tag
-
-You can add these tags by adding ``egg_info`` and the desired options to
-the command line ahead of the ``sdist`` or ``bdist`` commands that you want
-to generate a daily build or snapshot for.  See the section below on the
-`egg_info`_ command for more details.
-
-(Also, before you release your project, be sure to see the section above on
-`Specifying Your Project's Version`_ for more information about how pre- and
-post-release tags affect how setuptools and EasyInstall interpret version
-numbers.  This is important in order to make sure that dependency processing
-tools will know which versions of your project are newer than others.)
-
-Finally, if you are creating builds frequently, and either building them in a
-downloadable location or are copying them to a distribution server, you should
-probably also check out the `rotate`_ command, which lets you automatically
-delete all but the N most-recently-modified distributions matching a glob
-pattern.  So, you can use a command line like::
-
-    setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
-
-to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the
-most recent Subversion revision that affected the source tree), and then
-delete any egg files from the distribution directory except for the three
-that were built most recently.
-
-If you have to manage automated builds for multiple packages, each with
-different tagging and rotation policies, you may also want to check out the
-`alias`_ command, which would let each package define an alias like ``daily``
-that would perform the necessary tag, build, and rotate commands.  Then, a
-simpler script or cron job could just run ``setup.py daily`` in each project
-directory.  (And, you could also define sitewide or per-user default versions
-of the ``daily`` alias, so that projects that didn't define their own would
-use the appropriate defaults.)
-
-
-Generating Source Distributions
--------------------------------
-
-``setuptools`` enhances the distutils' default algorithm for source file
-selection, so that all files managed by CVS or Subversion in your project tree
-are included in any source distribution you build.  This is a big improvement
-over having to manually write a ``MANIFEST.in`` file and try to keep it in
-sync with your project.  So, if you are using CVS or Subversion, and your
-source distributions only need to include files that you're tracking in
-revision control, don't create a ``MANIFEST.in`` file for your project.
-(And, if you already have one, you might consider deleting it the next time
-you would otherwise have to change it.)
-
-(NOTE: other revision control systems besides CVS and Subversion can be
-supported using plugins; see the section below on `Adding Support for Other
-Revision Control Systems`_ for information on how to write such plugins.)
-
-If you need to include automatically generated files, or files that are kept in
-an unsupported revision control system, you'll need to create a ``MANIFEST.in``
-file to specify any files that the default file location algorithm doesn't
-catch.  See the distutils documentation for more information on the format of
-the ``MANIFEST.in`` file.
-
-But, be sure to ignore any part of the distutils documentation that deals with
-``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you
-from these issues and doesn't work the same way in any case.  Unlike the
-distutils, setuptools regenerates the source distribution manifest file
-every time you build a source distribution, and it builds it inside the
-project's ``.egg-info`` directory, out of the way of your main project
-directory.  You therefore need not worry about whether it is up-to-date or not.
-
-Indeed, because setuptools' approach to determining the contents of a source
-distribution is so much simpler, its ``sdist`` command omits nearly all of
-the options that the distutils' more complex ``sdist`` process requires.  For
-all practical purposes, you'll probably use only the ``--formats`` option, if
-you use any option at all.
-
-(By the way, if you're using some other revision control system, you might
-consider creating and publishing a `revision control plugin for setuptools`_.)
-
-
-.. _revision control plugin for setuptools: `Adding Support for Other Revision Control Systems`_
-
-
-Making your package available for EasyInstall
----------------------------------------------
-
-If you use the ``register`` command (``setup.py register``) to register your
-package with PyPI, that's most of the battle right there.  (See the
-`docs for the register command`_ for more details.)
-
-.. _docs for the register command: http://docs.python.org/dist/package-index.html
-
-If you also use the `upload`_ command to upload actual distributions of your
-package, that's even better, because EasyInstall will be able to find and
-download them directly from your project's PyPI page.
-
-However, there may be reasons why you don't want to upload distributions to
-PyPI, and just want your existing distributions (or perhaps a Subversion
-checkout) to be used instead.
-
-So here's what you need to do before running the ``register`` command.  There
-are three ``setup()`` arguments that affect EasyInstall:
-
-``url`` and ``download_url``
-   These become links on your project's PyPI page.  EasyInstall will examine
-   them to see if they link to a package ("primary links"), or whether they are
-   HTML pages.  If they're HTML pages, EasyInstall scans all HREFs on the
-   page for primary links.
-
-``long_description``
-   EasyInstall will check any URLs contained in this argument to see if they
-   are primary links.
-
-A URL is considered a "primary link" if it is a link to a .tar.gz, .tgz, .zip,
-.egg, .egg.zip, .tar.bz2, or .exe file, or if it has an ``#egg=project`` or
-``#egg=project-version`` fragment identifier attached to it.  EasyInstall
-attempts to determine a project name and optional version number from the text
-of a primary link *without* downloading it.  When it has found all the primary
-links, EasyInstall will select the best match based on requested version,
-platform compatibility, and other criteria.
-
-So, if your ``url`` or ``download_url`` point either directly to a downloadable
-source distribution, or to HTML page(s) that have direct links to such, then
-EasyInstall will be able to locate downloads automatically.  If you want to
-make Subversion checkouts available, then you should create links with either
-``#egg=project`` or ``#egg=project-version`` added to the URL.  You should
-replace ``project`` and ``version`` with the values they would have in an egg
-filename.  (Be sure to actually generate an egg and then use the initial part
-of the filename, rather than trying to guess what the escaped form of the
-project name and version number will be.)
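-
-A hedged sketch of such a link supplied via ``setup()`` (the checkout URL and
-project name are hypothetical)::
-
-    setup(
-        name="MyProject",
-        ...
-        # The #egg= fragment makes this a "primary link" for EasyInstall
-        download_url="http://svn.example.com/MyProject/trunk#egg=MyProject-dev",
-    )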
-
-Note that Subversion checkout links are of lower precedence than other kinds
-of distributions, so EasyInstall will not select a Subversion checkout for
-downloading unless it has a version included in the ``#egg=`` suffix, and
-it's a higher version than EasyInstall has seen in any other links for your
-project.
-
-As a result, it's a common practice to mark checkout URLs with a version of
-"dev" (i.e., ``#egg=projectname-dev``), so that users can do something like
-this::
-
-    easy_install --editable projectname==dev
-
-in order to check out the in-development version of ``projectname``.
-
-
-Managing "Continuous Releases" Using Subversion
------------------------------------------------
-
-If you expect your users to track in-development versions of your project via
-Subversion, there are a few additional steps you should take to ensure that
-things work smoothly with EasyInstall.  First, you should add the following
-to your project's ``setup.cfg`` file:
-
-.. code-block:: ini
-
-    [egg_info]
-    tag_build = .dev
-    tag_svn_revision = 1
-
-This will tell ``setuptools`` to generate package version numbers like
-``1.0a1.dev-r1263``, which will be considered to be an *older* release than
-``1.0a1``.  Thus, when you actually release ``1.0a1``, the entire egg
-infrastructure (including ``setuptools``, ``pkg_resources`` and EasyInstall)
-will know that ``1.0a1`` supersedes any interim snapshots from Subversion, and
-handle upgrades accordingly.
-
-(Note: the project version number you specify in ``setup.py`` should always be
-the *next* version of your software, not the last released version.
-Alternately, you can leave out the ``tag_build=.dev``, and always use the
-*last* release as a version number, so that your post-1.0 builds are labelled
-``1.0-r1263``, indicating a post-1.0 patchlevel.  Most projects so far,
-however, seem to prefer to think of their project as being a future version
-still under development, rather than a past version being patched.  It is of
-course possible for a single project to have both situations, using
-post-release numbering on release branches, and pre-release numbering on the
-trunk.  But you don't have to make things this complex if you don't want to.)
-
-Commonly, projects releasing code from Subversion will include a PyPI link to
-their checkout URL (as described in the previous section) with an
-``#egg=projectname-dev`` suffix.  This allows users to request EasyInstall
-to download ``projectname==dev`` in order to get the latest in-development
-code.  Note that if your project depends on such in-progress code, you may wish
-to specify your ``install_requires`` (or other requirements) to include
-``==dev``, e.g.:
-
-.. code-block:: python
-
-    install_requires = ["OtherProject>=0.2a1.dev-r143,==dev"]
-
-The above example says, "I really want at least this particular development
-revision number, but feel free to follow and use an ``#egg=OtherProject-dev``
-link if you find one".  This avoids the need to have actual source or binary
-distribution snapshots of in-development code available, just to be able to
-depend on the latest and greatest a project has to offer.
-
-A final note for Subversion development: if you are using SVN revision tags
-as described in this section, it's a good idea to run ``setup.py develop``
-after each Subversion checkin or update, because your project's version number
-will be changing, and your script wrappers need to be updated accordingly.
-
-Also, if the project's requirements have changed, the ``develop`` command will
-take care of fetching the updated dependencies, building changed extensions,
-etc.  Be sure to also remind any of your users who check out your project
-from Subversion that they need to run ``setup.py develop`` after every update
-in order to keep their checkout completely in sync.
-
-
-Making "Official" (Non-Snapshot) Releases
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-When you make an official release, creating source or binary distributions,
-you will need to override the tag settings from ``setup.cfg``, so that you
-don't end up registering versions like ``foobar-0.7a1.dev-r34832``.  This is
-easy to do if you are developing on the trunk and using tags or branches for
-your releases - just make the change to ``setup.cfg`` after branching or
-tagging the release, so the trunk will still produce development snapshots.
-
-Alternately, if you are not branching for releases, you can override the
-default version options on the command line, using something like::
-
-    python setup.py egg_info -RDb "" sdist bdist_egg register upload
-
-The first part of this command (``egg_info -RDb ""``) will override the
-configured tag information, before creating source and binary eggs, registering
-the project with PyPI, and uploading the files.  Thus, these commands will use
-the plain version from your ``setup.py``, without adding the Subversion
-revision number or build designation string.
-
-Of course, if you will be doing this a lot, you may wish to create a personal
-alias for this operation, e.g.::
-
-    python setup.py alias -u release egg_info -RDb ""
-
-You can then use it like this::
-
-    python setup.py release sdist bdist_egg register upload
-
-Or of course you can create more elaborate aliases that do all of the above.
-See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
-
-
-
-Distributing Extensions compiled with Pyrex
--------------------------------------------
-
-``setuptools`` includes transparent support for building Pyrex extensions, as
-long as you define your extensions using ``setuptools.Extension``, *not*
-``distutils.Extension``.  You must also not import anything from Pyrex in
-your setup script.
-
-If you follow these rules, you can safely list ``.pyx`` files as the source
-of your ``Extension`` objects in the setup script.  ``setuptools`` will detect
-at build time whether Pyrex is installed or not.  If it is, then ``setuptools``
-will use it.  If not, then ``setuptools`` will silently change the
-``Extension`` objects to refer to the ``.c`` counterparts of the ``.pyx``
-files, so that the normal distutils C compilation process will occur.
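-
-A minimal sketch (the module and file names are hypothetical)::
-
-    from setuptools import setup, Extension
-
-    setup(
-        name="MyProject",
-        ...
-        ext_modules = [
-            # List the .pyx source directly; setuptools falls back to the
-            # generated .c file automatically when Pyrex isn't installed
-            Extension("mypkg.fastmath", ["mypkg/fastmath.pyx"]),
-        ],
-    )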
-
-Of course, for this to work, your source distributions must include the C
-code generated by Pyrex, as well as your original ``.pyx`` files.  This means
-that you will probably want to include current ``.c`` files in your revision
-control system, rebuilding them whenever you check changes in for the ``.pyx``
-source files.  This will ensure that people tracking your project in CVS or
-Subversion will be able to build it even if they don't have Pyrex installed,
-and that your source releases will be similarly usable with or without Pyrex.
-
-
------------------
-Command Reference
------------------
-
-.. _alias:
-
-``alias`` - Define shortcuts for commonly used commands
-=======================================================
-
-Sometimes, you need to use the same commands over and over, but you can't
-necessarily set them as defaults.  For example, if you produce both development
-snapshot releases and "stable" releases of a project, you may want to put
-the distributions in different places, or use different ``egg_info`` tagging
-options, etc.  In these cases, it doesn't make sense to set the options in
-a distutils configuration file, because the values of the options change based
-on what you're trying to do.
-
-Setuptools therefore allows you to define "aliases" - shortcut names for
-an arbitrary string of commands and options, using ``setup.py alias aliasname
-expansion``, where aliasname is the name of the new alias, and the remainder of
-the command line supplies its expansion.  For example, this command defines
-a sitewide alias called "daily", that sets various ``egg_info`` tagging
-options::
-
-    setup.py alias --global-config daily egg_info --tag-svn-revision \
-        --tag-build=development
-
-Once the alias is defined, it can then be used with other setup commands,
-e.g.::
-
-    setup.py daily bdist_egg        # generate a daily-build .egg file
-    setup.py daily sdist            # generate a daily-build source distro
-    setup.py daily sdist bdist_egg  # generate both
-
-The above commands are interpreted as if the word ``daily`` were replaced with
-``egg_info --tag-svn-revision --tag-build=development``.
-
-Note that setuptools will expand each alias *at most once* in a given command
-line.  This serves two purposes.  First, if you accidentally create an alias
-loop, it will have no effect; you'll instead get an error message about an
-unknown command.  Second, it allows you to define an alias for a command, that
-uses that command.  For example, this (project-local) alias::
-
-    setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg
-
-redefines the ``bdist_egg`` command so that it always runs the ``rotate``
-command afterwards to delete all but the newest egg file.  It doesn't loop
-indefinitely on ``bdist_egg`` because the alias is only expanded once when
-used.
-
-You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.::
-
-    setup.py alias --global-config --remove daily
-
-would delete the "daily" alias we defined above.
-
-Aliases can be defined on a project-specific, per-user, or sitewide basis.  The
-default is to define or remove a project-specific alias, but you can use any of
-the `configuration file options`_ (listed under the `saveopts`_ command, below)
-to determine which distutils configuration file an alias will be added to
-(or removed from).
-
-Note that if you omit the "expansion" argument to the ``alias`` command,
-you'll get output showing that alias' current definition (and what
-configuration file it's defined in).  If you omit the alias name as well,
-you'll get a listing of all current aliases along with their configuration
-file locations.
-
-
-``bdist_egg`` - Create a Python Egg for the project
-===================================================
-
-This command generates a Python Egg (``.egg`` file) for the project.  Python
-Eggs are the preferred binary distribution format for EasyInstall, because they
-are cross-platform (for "pure" packages), directly importable, and contain
-project metadata including scripts and information about the project's
-dependencies.  They can be simply downloaded and added to ``sys.path``
-directly, or they can be placed in a directory on ``sys.path`` and then
-automatically discovered by the egg runtime system.
-
-This command runs the `egg_info`_ command (if it hasn't already run) to update
-the project's metadata (``.egg-info``) directory.  If you have added any extra
-metadata files to the ``.egg-info`` directory, those files will be included in
-the new egg file's metadata directory, for use by the egg runtime system or by
-any applications or frameworks that use that metadata.
-
-You won't usually need to specify any special options for this command; just
-use ``bdist_egg`` and you're done.  But there are a few options that may
-be occasionally useful:
-
-``--dist-dir=DIR, -d DIR``
-    Set the directory where the ``.egg`` file will be placed.  If you don't
-    supply this, then the ``--dist-dir`` setting of the ``bdist`` command
-    will be used, which is usually a directory named ``dist`` in the project
-    directory.
-
-``--plat-name=PLATFORM, -p PLATFORM``
-    Set the platform name string that will be embedded in the egg's filename
-    (assuming the egg contains C extensions).  This can be used to override
-    the distutils default platform name with something more meaningful.  Keep
-    in mind, however, that the egg runtime system expects to see eggs with
-    distutils platform names, so it may ignore or reject eggs with non-standard
-    platform names.  Similarly, the EasyInstall program may ignore them when
-    searching web pages for download links.  However, if you are
-    cross-compiling or doing some other unusual things, you might find a use
-    for this option.
-
-``--exclude-source-files``
-    Don't include any modules' ``.py`` files in the egg, just compiled Python,
-    C, and data files.  (Note that this doesn't affect any ``.py`` files in the
-    EGG-INFO directory or its subdirectories, since for example there may be
-    scripts with a ``.py`` extension which must still be retained.)  We don't
-    recommend that you use this option except for packages that are being
-    bundled for proprietary end-user applications, or for "embedded" scenarios
-    where space is at an absolute premium.  On the other hand, if your package
-    is going to be installed and used in compressed form, you might as well
-    exclude the source because Python's ``traceback`` module doesn't currently
-    understand how to display zipped source code anyway, or how to deal with
-    files that are in a different place from where their code was compiled.
-
-There are also some options you will probably never need, but which are there
-because they were copied from similar ``bdist`` commands used as an example for
-creating this one.  They may be useful for testing and debugging, however,
-which is why we kept them:
-
-``--keep-temp, -k``
-    Keep the contents of the ``--bdist-dir`` tree around after creating the
-    ``.egg`` file.
-
-``--bdist-dir=DIR, -b DIR``
-    Set the temporary directory for creating the distribution.  The entire
-    contents of this directory are zipped to create the ``.egg`` file, after
-    running various installation commands to copy the package's modules, data,
-    and extensions here.
-
-``--skip-build``
-    Skip doing any "build" commands; just go straight to the
-    install-and-compress phases.
-
-
-.. _develop:
-
-``develop`` - Deploy the project source in "Development Mode"
-=============================================================
-
-This command allows you to deploy your project's source for use in one or more
-"staging areas" where it will be available for importing.  This deployment is
-done in such a way that changes to the project source are immediately available
-in the staging area(s), without needing to run a build or install step after
-each change.
-
-The ``develop`` command works by creating an ``.egg-link`` file (named for the
-project) in the given staging area.  If the staging area is Python's
-``site-packages`` directory, it also updates an ``easy-install.pth`` file so
-that the project is on ``sys.path`` by default for all programs run using that
-Python installation.
-
-The ``develop`` command also installs wrapper scripts in the staging area (or
-a separate directory, as specified) that will ensure the project's dependencies
-are available on ``sys.path`` before running the project's source scripts.
-And, it ensures that any missing project dependencies are available in the
-staging area, by downloading and installing them if necessary.
-
-Last, but not least, the ``develop`` command invokes the ``build_ext -i``
-command to ensure any C extensions in the project have been built and are
-up-to-date, and the ``egg_info`` command to ensure the project's metadata is
-updated (so that the runtime and wrappers know what the project's dependencies
-are).  If you make any changes to the project's setup script or C extensions,
-you should rerun the ``develop`` command against all relevant staging areas to
-keep the project's scripts, metadata and extensions up-to-date.  Most other
-kinds of changes to your project should not require any build operations or
-rerunning ``develop``, but keep in mind that even minor changes to the setup
-script (e.g. changing an entry point definition) require you to re-run the
-``develop`` or ``test`` commands to keep the distribution updated.
-
-Here are some of the options that the ``develop`` command accepts.  Note that
-they affect the project's dependencies as well as the project itself, so if you
-have dependencies that need to be installed and you use ``--exclude-scripts``
-(for example), the dependencies' scripts will not be installed either!  For
-this reason, you may want to use EasyInstall to install the project's
-dependencies before using the ``develop`` command, if you need finer control
-over the installation options for dependencies.
-
-``--uninstall, -u``
-    Un-deploy the current project.  You may use the ``--install-dir`` or ``-d``
-    option to designate the staging area.  The created ``.egg-link`` file will
-    be removed, if present and it is still pointing to the project directory.
-    The project directory will be removed from ``easy-install.pth`` if the
-    staging area is Python's ``site-packages`` directory.
-
-    Note that this option currently does *not* uninstall script wrappers!  You
-    must uninstall them yourself, or overwrite them by using EasyInstall to
-    activate a different version of the package.  You can also avoid installing
-    script wrappers in the first place, if you use the ``--exclude-scripts``
-    (aka ``-x``) option when you run ``develop`` to deploy the project.
-
-``--multi-version, -m``
-    "Multi-version" mode. Specifying this option prevents ``develop`` from
-    adding an ``easy-install.pth`` entry for the project(s) being deployed, and
-    if an entry for any version of a project already exists, the entry will be
-    removed upon successful deployment.  In multi-version mode, no specific
-    version of the package is available for importing, unless you use
-    ``pkg_resources.require()`` to put it on ``sys.path``, or you are running
-    a wrapper script generated by ``setuptools`` or EasyInstall.  (In which
-    case the wrapper script calls ``require()`` for you.)
-
-    Note that if you install to a directory other than ``site-packages``,
-    this option is automatically in effect, because ``.pth`` files can only be
-    used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use
-    the ``--install-dir`` or ``-d`` option (or they are set via configuration
-    file(s)) your project and its dependencies will be deployed in
-    multi-version mode.
-
-``--install-dir=DIR, -d DIR``
-    Set the installation directory (staging area).  If this option is not
-    directly specified on the command line or in a distutils configuration
-    file, the distutils default installation location is used.  Normally, this
-    will be the ``site-packages`` directory, but if you are using distutils
-    configuration files, setting things like ``prefix`` or ``install_lib``,
-    then those settings are taken into account when computing the default
-    staging area.
-
-``--script-dir=DIR, -s DIR``
-    Set the script installation directory.  If you don't supply this option
-    (via the command line or a configuration file), but you *have* supplied
-    an ``--install-dir`` (via command line or config file), then this option
-    defaults to the same directory, so that the scripts will be able to find
-    their associated package installation.  Otherwise, this setting defaults
-    to the location where the distutils would normally install scripts, taking
-    any distutils configuration file settings into account.
-
-``--exclude-scripts, -x``
-    Don't deploy script wrappers.  This is useful if you don't want to disturb
-    existing versions of the scripts in the staging area.
-
-``--always-copy, -a``
-    Copy all needed distributions to the staging area, even if they
-    are already present in another directory on ``sys.path``.  By default, if
-    a requirement can be met using a distribution that is already available in
-    a directory on ``sys.path``, it will not be copied to the staging area.
-
-``--egg-path=DIR``
-    Force the generated ``.egg-link`` file to use a specified relative path
-    to the source directory.  This can be useful in circumstances where your
-    installation directory is being shared by code running under multiple
-    platforms (e.g. Mac and Windows) which have different absolute locations
-    for the code under development, but the same *relative* locations with
-    respect to the installation directory.  If you use this option when
-    installing, you must supply the same relative path when uninstalling.
-
-In addition to the above options, the ``develop`` command also accepts all of
-the same options accepted by ``easy_install``.  If you've configured any
-``easy_install`` settings in your ``setup.cfg`` (or other distutils config
-files), the ``develop`` command will use them as defaults, unless you override
-them in a ``[develop]`` section or on the command line.
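-
-For example (an illustrative sketch; the paths are hypothetical), a project
-that always deploys to a private staging area could record these defaults in
-its ``setup.cfg``:
-
-.. code-block:: ini
-
-    [develop]
-    install-dir = /home/me/staging
-    script-dir = /home/me/staging/bin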
-
-
-``easy_install`` - Find and install packages
-============================================
-
-This command runs the `EasyInstall tool
-<http://peak.telecommunity.com/DevCenter/EasyInstall>`_ for you.  It is exactly
-equivalent to running the ``easy_install`` command.  All command line arguments
-following this command are consumed and not processed further by the distutils,
-so this must be the last command listed on the command line.  Please see
-the EasyInstall documentation for the options reference and usage examples.
-Normally, there is no reason to use this command via the command line, as you
-can just use ``easy_install`` directly.  It's only listed here so that you know
-it's a distutils command, which means that you can:
-
-* create command aliases that use it,
-* create distutils extensions that invoke it as a subcommand, and
-* configure options for it in your ``setup.cfg`` or other distutils config
-  files.
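-
-For example (a minimal sketch; the URL is purely illustrative), a project
-could give the command a default ``--find-links`` location in its
-``setup.cfg``:
-
-.. code-block:: ini
-
-    [easy_install]
-    find_links = http://example.com/downloads/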
-
-
-.. _egg_info:
-
-``egg_info`` - Create egg metadata and set build tags
-=====================================================
-
-This command performs two operations: it updates a project's ``.egg-info``
-metadata directory (used by the ``bdist_egg``, ``develop``, and ``test``
-commands), and it allows you to temporarily change a project's version string,
-to support "daily builds" or "snapshot" releases.  It is run automatically by
-the ``sdist``, ``bdist_egg``, ``develop``, ``register``, and ``test`` commands
-in order to update the project's metadata, but you can also specify it
-explicitly in order to temporarily change the project's version string while
-executing other commands.  (It also generates the ``.egg-info/SOURCES.txt``
-manifest file, which is used when you are building source distributions.)
-
-In addition to writing the core egg metadata defined by ``setuptools`` and
-required by ``pkg_resources``, this command can be extended to write other
-metadata files as well, by defining entry points in the ``egg_info.writers``
-group.  See the section on `Adding new EGG-INFO Files`_ below for more details.
-Note that using additional metadata writers may require you to include a
-``setup_requires`` argument to ``setup()`` in order to ensure that the desired
-writers are available on ``sys.path``.
-
-
-Release Tagging Options
------------------------
-
-The following options can be used to modify the project's version string for
-all remaining commands on the setup command line.  The options are processed
-in the order shown, so if you use more than one, the requested tags will be
-added in the following order:
-
-``--tag-build=NAME, -b NAME``
-    Append NAME to the project's version string.  Due to the way setuptools
-    processes "pre-release" version suffixes beginning with the letters "a"
-    through "e" (like "alpha", "beta", and "candidate"), you will usually want
-    to use a tag like ".build" or ".dev", as this will cause the version number
-    to be considered *lower* than the project's default version.  (If you
-    want to make the version number *higher* than the default version, you can
-    always leave off --tag-build and then use one or both of the following
-    options.)
-
-    If you have a default build tag set in your ``setup.cfg``, you can suppress
-    it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument
-    to the ``egg_info`` command.
-
-``--tag-svn-revision, -r``
-    If the current directory is a Subversion checkout (i.e. has a ``.svn``
-    subdirectory), this appends a string of the form "-rNNNN" to the project's
-    version string, where NNNN is the revision number of the most recent
-    modification to the current directory, as obtained from the ``svn info``
-    command.
-
-    If the current directory is not a Subversion checkout, the command will
-    look for a ``PKG-INFO`` file instead, and try to find the revision number
-    from that, by looking for a "-rNNNN" string at the end of the version
-    number.  (This is so that building a package from a source distribution of
-    a Subversion snapshot will produce a binary with the correct version
-    number.)
-
-    If there is no ``PKG-INFO`` file, or the version number contained therein
-    does not end with ``-r`` and a number, then ``-r0`` is used.
-
-``--no-svn-revision, -R``
-    Don't include the Subversion revision in the version number.  This option
-    is included so you can override a default setting put in ``setup.cfg``.
-
-``--tag-date, -d``
-    Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the
-    project's version number.
-
-``--no-date, -D``
-    Don't include a date stamp in the version number.  This option is included
-    so you can override a default setting in ``setup.cfg``.
-
-
-(Note: Because these options modify the version number used for source and
-binary distributions of your project, you should first make sure that you know
-how the resulting version numbers will be interpreted by automated tools
-like EasyInstall.  See the section above on `Specifying Your Project's
-Version`_ for an explanation of pre- and post-release tags, as well as tips on
-how to choose and verify a versioning scheme for your project.)
-
-For advanced uses, there is one other option that can be set, to change the
-location of the project's ``.egg-info`` directory.  Commands that need to find
-the project's source directory or metadata should get it from this setting:
-
-
-Other ``egg_info`` Options
---------------------------
-
-``--egg-base=SOURCEDIR, -e SOURCEDIR``
-    Specify the directory that should contain the .egg-info directory.  This
-    should normally be the root of your project's source tree (which is not
-    necessarily the same as your project directory; some projects use a ``src``
-    or ``lib`` subdirectory as the source root).  You should not normally need
-    to specify this directory, as it is normally determined from the
-    ``package_dir`` argument to the ``setup()`` function, if any.  If there is
-    no ``package_dir`` set, this option defaults to the current directory.
-
-
-``egg_info`` Examples
----------------------
-
-Creating a dated "nightly build" snapshot egg::
-
-    python setup.py egg_info --tag-date --tag-build=DEV bdist_egg
-
-Creating and uploading a release with no version tags, even if some default
-tags are specified in ``setup.cfg``::
-
-    python setup.py egg_info -RDb "" sdist bdist_egg register upload
-
-(Notice that ``egg_info`` must always appear on the command line *before* any
-commands that you want the version changes to apply to.)
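-
-If you routinely build such snapshots, you can make the tags the default by
-recording them in ``setup.cfg`` instead (an illustrative sketch; pick
-whatever build tag suits your versioning scheme):
-
-.. code-block:: ini
-
-    [egg_info]
-    tag_build = .dev
-    tag_date = 1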
-
-
-.. _install command:
-
-``install`` - Run ``easy_install`` or old-style installation
-============================================================
-
-The setuptools ``install`` command is basically a shortcut to run the
-``easy_install`` command on the current project.  However, for convenience
-in creating "system packages" of setuptools-based projects, you can also
-use this option:
-
-``--single-version-externally-managed``
-    This boolean option tells the ``install`` command to perform an "old style"
-    installation, with the addition of an ``.egg-info`` directory so that the
-    installed project will still have its metadata available and operate
-    normally.  If you use this option, you *must* also specify the ``--root``
-    or ``--record`` options (or both), because otherwise you will have no way
-    to identify and remove the installed files.
-
-This option is automatically in effect when ``install`` is invoked by another
-distutils command, so that commands like ``bdist_wininst`` and ``bdist_rpm``
-will create system packages of eggs.  It is also automatically in effect if
-you specify the ``--root`` option.
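-
-For example, a system-package build script might run something like this
-(the paths and filenames shown are illustrative)::
-
-    python setup.py install --single-version-externally-managed \
-        --record=installed-files.txt --root=/tmp/buildroot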
-
-
-``install_egg_info`` - Install an ``.egg-info`` directory in ``site-packages``
-==============================================================================
-
-Setuptools runs this command as part of ``install`` operations that use the
-``--single-version-externally-managed`` options.  You should not invoke it
-directly; it is documented here for completeness and so that distutils
-extensions such as system package builders can make use of it.  This command
-has only one option:
-
-``--install-dir=DIR, -d DIR``
-    The parent directory where the ``.egg-info`` directory will be placed.
-    Defaults to the same as the ``--install-dir`` option specified for the
-    ``install_lib`` command, which is usually the system ``site-packages``
-    directory.
-
-This command assumes that the ``egg_info`` command has been given valid options
-via the command line or ``setup.cfg``, as it will invoke the ``egg_info``
-command and use its options to locate the project's source ``.egg-info``
-directory.
-
-
-.. _rotate:
-
-``rotate`` - Delete outdated distribution files
-===============================================
-
-As you develop new versions of your project, your distribution (``dist``)
-directory will gradually fill up with older source and/or binary distribution
-files.  The ``rotate`` command lets you automatically clean these up, keeping
-only the N most-recently modified files matching a given pattern.
-
-``--match=PATTERNLIST, -m PATTERNLIST``
-    Comma-separated list of glob patterns to match.  This option is *required*.
-    The project name and ``-*`` are prepended to the supplied patterns, in order
-    to match only distributions belonging to the current project (in case you
-    have a shared distribution directory for multiple projects).  Typically,
-    you will use a glob pattern like ``.zip`` or ``.egg`` to match files of
-    the specified type.  Note that each supplied pattern is treated as a
-    distinct group of files for purposes of selecting files to delete.
-
-``--keep=COUNT, -k COUNT``
-    Number of matching distributions to keep.  For each group of files
-    identified by a pattern specified with the ``--match`` option, delete all
-    but the COUNT most-recently-modified files in that group.  This option is
-    *required*.
-
-``--dist-dir=DIR, -d DIR``
-    Directory where the distributions are.  This defaults to the value of the
-    ``bdist`` command's ``--dist-dir`` option, which will usually be the
-    project's ``dist`` subdirectory.
-
-**Example 1**: Delete all .tar.gz files from the distribution directory, except
-for the 3 most recently modified ones::
-
-    setup.py rotate --match=.tar.gz --keep=3
-
-**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution
-directory, except the most recently modified one for each Python version::
-
-    setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1
-
-
-.. _saveopts:
-
-``saveopts`` - Save used options to a configuration file
-========================================================
-
-Finding and editing ``distutils`` configuration files can be a pain, especially
-since you also have to translate the configuration options from command-line
-form to the proper configuration file format.  You can avoid these hassles by
-using the ``saveopts`` command.  Just add it to the command line to save the
-options you used.  For example, this command builds the project using
-the ``mingw32`` C compiler, then saves the --compiler setting as the default
-for future builds (even those run implicitly by the ``install`` command)::
-
-    setup.py build --compiler=mingw32 saveopts
-
-The ``saveopts`` command saves all options for every command specified on the
-command line to the project's local ``setup.cfg`` file, unless you use one of
-the `configuration file options`_ to change where the options are saved.  For
-example, this command does the same as above, but saves the compiler setting
-to the site-wide (global) distutils configuration::
-
-    setup.py build --compiler=mingw32 saveopts -g
-
-Note that it doesn't matter where you place the ``saveopts`` command on the
-command line; it will still save all the options specified for all commands.
-For example, this is another valid way to spell the last example::
-
-    setup.py saveopts -g build --compiler=mingw32
-
-Note, however, that all of the commands specified are always run, regardless of
-where ``saveopts`` is placed on the command line.
-
-
-Configuration File Options
---------------------------
-
-Normally, settings such as options and aliases are saved to the project's
-local ``setup.cfg`` file.  But you can override this and save them to the
-global or per-user configuration files, or to a manually-specified filename.
-
-``--global-config, -g``
-    Save settings to the global ``distutils.cfg`` file inside the ``distutils``
-    package directory.  You must have write access to that directory to use
-    this option.  You also can't combine this option with ``-u`` or ``-f``.
-
-``--user-config, -u``
-    Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or
-    ``$HOME/pydistutils.cfg`` (Windows) file.  You can't combine this option
-    with ``-g`` or ``-f``.
-
-``--filename=FILENAME, -f FILENAME``
-    Save settings to the specified configuration file.  You can't
-    combine this option with ``-g`` or ``-u``.  Note that if you specify a
-    non-standard filename, the ``distutils`` and ``setuptools`` will not
-    use the file's contents.  This option is mainly included for use in
-    testing.
-
-These options are used by other ``setuptools`` commands that modify
-configuration files, such as the `alias`_ and `setopt`_ commands.
-
-
-.. _setopt:
-
-``setopt`` - Set a distutils or setuptools option in a config file
-==================================================================
-
-This command is mainly for use by scripts, but it can also be used as a quick
-and dirty way to change a distutils configuration option without having to
-remember what file the options are in and then open an editor.
-
-**Example 1**.  Set the default C compiler to ``mingw32`` (using long option
-names)::
-
-    setup.py setopt --command=build --option=compiler --set-value=mingw32
-
-**Example 2**.  Remove any setting for the distutils default package
-installation directory (short option names)::
-
-    setup.py setopt -c install -o install_lib -r
-
-
-Options for the ``setopt`` command:
-
-``--command=COMMAND, -c COMMAND``
-    Command to set the option for.  This option is required.
-
-``--option=OPTION, -o OPTION``
-    The name of the option to set.  This option is required.
-
-``--set-value=VALUE, -s VALUE``
-    The value to set the option to.  Not needed if ``-r`` or ``--remove`` is
-    set.
-
-``--remove, -r``
-    Remove (unset) the option, instead of setting it.
-
-In addition to the above options, you may use any of the `configuration file
-options`_ (listed under the `saveopts`_ command, above) to determine which
-distutils configuration file the option will be added to (or removed from).
-
-
-.. _test:
-
-``test`` - Build package and run a unittest suite
-=================================================
-
-When doing test-driven development, or running automated builds that need
-testing before they are deployed for downloading or use, it's often useful
-to be able to run a project's unit tests without actually deploying the project
-anywhere, even using the ``develop`` command.  The ``test`` command runs a
-project's unit tests without actually deploying it, by temporarily putting the
-project's source on ``sys.path``, after first running ``build_ext -i`` and
-``egg_info`` to ensure that any C extensions and project metadata are
-up-to-date.
-
-To use this command, your project's tests must be wrapped in a ``unittest``
-test suite by either a function, a ``TestCase`` class or method, or a module
-or package containing ``TestCase`` classes.  If the named suite is a module,
-and the module has an ``additional_tests()`` function, it is called and the
-result (which must be a ``unittest.TestSuite``) is added to the tests to be
-run.  If the named suite is a package, any submodules and subpackages are
-recursively added to the overall test suite.  (Note: if your project specifies
-a ``test_loader``, the rules for processing the chosen ``test_suite`` may
-differ; see the `test_loader`_ documentation for more details.)
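-
-For example (a minimal sketch; the dotted module name is hypothetical), a
-module named as the ``test_suite`` could contribute its doctests this way::
-
-    import doctest
-
-    def additional_tests():
-        # anything returned here (a unittest.TestSuite) is added to the
-        # tests the ``test`` command runs
-        return doctest.DocTestSuite('my_package.utils')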
-
-Note that many test systems including ``doctest`` support wrapping their
-non-``unittest`` tests in ``TestSuite`` objects.  So, if you are using a test
-package that does not support this, we suggest you encourage its developers to
-implement test suite support, as this is a convenient and standard way to
-aggregate a collection of tests to be run under a common test harness.
-
-By default, tests will be run in the "verbose" mode of the ``unittest``
-package's text test runner, but you can get the "quiet" mode (just dots) if
-you supply the ``-q`` or ``--quiet`` option, either as a global option to
-the setup script (e.g. ``setup.py -q test``) or as an option for the ``test``
-command itself (e.g. ``setup.py test -q``).  There is one other option
-available:
-
-``--test-suite=NAME, -s NAME``
-    Specify the test suite (or module, class, or method) to be run
-    (e.g. ``some_module.test_suite``).  The default for this option can be
-    set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.::
-
-        setup(
-            # ...
-            test_suite = "my_package.tests.test_all"
-        )
-
-    If you did not set a ``test_suite`` in your ``setup()`` call, and do not
-    provide a ``--test-suite`` option, an error will occur.
-
-
-.. _upload:
-
-``upload`` - Upload source and/or egg distributions to PyPI
-===========================================================
-
-PyPI now supports uploading project files for redistribution; uploaded files
-are easily found by EasyInstall, even if you don't have download links on your
-project's home page.
-
-Although Python 2.5 will support uploading all types of distributions to PyPI,
-setuptools only supports source distributions and eggs.  (This is partly
-because PyPI's upload support is currently broken for various other file
-types.)  To upload files, you must include the ``upload`` command *after* the
-``sdist`` or ``bdist_egg`` commands on the setup command line.  For example::
-
-    setup.py bdist_egg upload         # create an egg and upload it
-    setup.py sdist upload             # create a source distro and upload it
-    setup.py sdist bdist_egg upload   # create and upload both
-
-Note that to upload files for a project, the corresponding version must already
-be registered with PyPI, using the distutils ``register`` command.  It's
-usually a good idea to include the ``register`` command at the start of the
-command line, so that any registration problems can be found and fixed before
-building and uploading the distributions, e.g.::
-
-    setup.py register sdist bdist_egg upload
-
-This will update PyPI's listing for your project's current version.
-
-Note, by the way, that the metadata in your ``setup()`` call determines what
-will be listed in PyPI for your package.  Try to fill out as much of it as
-possible, as it will save you a lot of trouble manually adding and updating
-your PyPI listings.  Just put it in ``setup.py`` and use the ``register``
-command to keep PyPI up to date.
-
-The ``upload`` command has a few options worth noting:
-
-``--sign, -s``
-    Sign each uploaded file using GPG (GNU Privacy Guard).  The ``gpg`` program
-    must be available for execution on the system ``PATH``.
-
-``--identity=NAME, -i NAME``
-    Specify the identity or key name for GPG to use when signing.  The value of
-    this option will be passed through the ``--local-user`` option of the
-    ``gpg`` program.
-
-``--show-response``
-    Display the full response text from server; this is useful for debugging
-    PyPI problems.
-
-``--repository=URL, -r URL``
-    The URL of the repository to upload to.  Defaults to
-    http://pypi.python.org/pypi (i.e., the main PyPI installation).
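-
-For example, to sign the uploaded files with a particular GPG key (the
-identity shown is purely illustrative)::
-
-    setup.py sdist bdist_egg upload --sign --identity="Package Maintainer"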
-
-.. _upload_docs:
-
-``upload_docs`` - Upload package documentation to PyPI
-======================================================
-
-PyPI now supports uploading project documentation to the dedicated URL
-http://packages.python.org/<project>/.
-
-The ``upload_docs`` command will create the necessary zip file out of a
-documentation directory and will post to the repository.
-
-Note that to upload the documentation of a project, the corresponding version
-must already be registered with PyPI, using the distutils ``register``
-command -- just like the ``upload`` command.
-
-Assuming there is an ``Example`` project with documentation in the
-subdirectory ``docs``, e.g.::
-
-  Example/
-  |-- example.py
-  |-- setup.cfg
-  |-- setup.py
-  |-- docs
-  |   |-- build
-  |   |   `-- html
-  |   |       |-- index.html
-  |   |       `-- tips_tricks.html
-  |   |-- conf.py
-  |   |-- index.txt
-  |   `-- tips_tricks.txt
-
-You can simply pass the documentation directory path to the ``upload_docs``
-command::
-
-    python setup.py upload_docs --upload-dir=docs/build/html
-
-If no ``--upload-dir`` is given, ``upload_docs`` will attempt to run the
-``build_sphinx`` command to generate uploadable documentation.
-For the command to become available, `Sphinx <http://sphinx.pocoo.org/>`_
-must be installed in the same environment as distribute.
-
-As with other ``setuptools``-based commands, you can define useful
-defaults in the ``setup.cfg`` of your Python project, e.g.:
-
-.. code-block:: ini
-
-    [upload_docs]
-    upload-dir = docs/build/html
-
-The ``upload_docs`` command has the following options:
-
-``--upload-dir``
-    The directory to be uploaded to the repository.
-
-``--show-response``
-    Display the full response text from server; this is useful for debugging
-    PyPI problems.
-
-``--repository=URL, -r URL``
-    The URL of the repository to upload to.  Defaults to
-    http://pypi.python.org/pypi (i.e., the main PyPI installation).
-
-
---------------------------------
-Extending and Reusing Distribute
---------------------------------
-
-Creating ``distutils`` Extensions
-=================================
-
-It can be hard to add new commands or setup arguments to the distutils.  But
-the ``setuptools`` package makes it a bit easier, by allowing you to distribute
-a distutils extension as a separate project, and then have projects that need
-the extension just refer to it in their ``setup_requires`` argument.
-
-With ``setuptools``, your distutils extension projects can hook in new
-commands and ``setup()`` arguments just by defining "entry points".  These
-are mappings from command or argument names to a specification of where to
-import a handler from.  (See the section on `Dynamic Discovery of Services and
-Plugins`_ above for some more background on entry points.)
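-
-For example (an illustrative sketch; the project name is hypothetical), a
-project that needs such an extension at build time just lists it in its own
-setup script::
-
-    setup(
-        # ...
-        setup_requires = ["MyDistutilsExtension>=1.0"],
-    )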
-
-
-Adding Commands
----------------
-
-You can add new ``setup`` commands by defining entry points in the
-``distutils.commands`` group.  For example, if you wanted to add a ``foo``
-command, you might add something like this to your distutils extension
-project's setup script::
-
-    setup(
-        # ...
-        entry_points = {
-            "distutils.commands": [
-                "foo = mypackage.some_module:foo",
-            ],
-        },
-    )
-
-(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
-a ``setuptools.Command`` subclass.)
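-
-For reference, a minimal sketch of such a command class (its behavior here is
-purely illustrative) might look like this::
-
-    from setuptools import Command
-
-    class foo(Command):
-        """A do-nothing example command."""
-
-        description = "example command added via an entry point"
-        user_options = []   # no command-line options of its own
-
-        def initialize_options(self):
-            pass
-
-        def finalize_options(self):
-            pass
-
-        def run(self):
-            self.announce("the foo command ran")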
-
-Once a project containing such entry points has been activated on ``sys.path``
-(e.g. by running "install" or "develop" with a site-packages installation
-directory), the command(s) will be available to any ``setuptools``-based setup
-scripts.  It is not necessary to use the ``--command-packages`` option or
-to monkeypatch the ``distutils.command`` package to install your commands;
-``setuptools`` automatically adds a wrapper to the distutils to search for
-entry points in the active distributions on ``sys.path``.  In fact, this is
-how setuptools' own commands are installed: the setuptools project's setup
-script defines entry points for them!
-
-
-Adding ``setup()`` Arguments
-----------------------------
-
-Sometimes, your commands may need additional arguments to the ``setup()``
-call.  You can enable this by defining entry points in the
-``distutils.setup_keywords`` group.  For example, if you wanted a ``setup()``
-argument called ``bar_baz``, you might add something like this to your
-distutils extension project's setup script::
-
-    setup(
-        # ...
-        entry_points = {
-            "distutils.commands": [
-                "foo = mypackage.some_module:foo",
-            ],
-            "distutils.setup_keywords": [
-                "bar_baz = mypackage.some_module:validate_bar_baz",
-            ],
-        },
-    )
-
-The idea here is that the entry point defines a function that will be called
-to validate the ``setup()`` argument, if it's supplied.  The ``Distribution``
-object will have the initial value of the attribute set to ``None``, and the
-validation function will only be called if the ``setup()`` call sets it to
-a non-None value.  Here's an example validation function::
-
-    def assert_bool(dist, attr, value):
-        """Verify that value is True, False, 0, or 1"""
-        if bool(value) != value:
-            raise DistutilsSetupError(
-                "%r must be a boolean value (got %r)" % (attr,value)
-            )
-
-Your function should accept three arguments: the ``Distribution`` object,
-the attribute name, and the attribute value.  It should raise a
-``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument
-is invalid.  Remember, your function will only be called with non-None values,
-and the default value of arguments defined this way is always None.  So, your
-commands should always be prepared for the possibility that the attribute will
-be ``None`` when they access it later.
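-
-For instance (an illustrative sketch), a command that consumes the new
-argument would typically guard against the attribute still being ``None``
-inside its ``run()`` method::
-
-    def run(self):
-        bar_baz = getattr(self.distribution, 'bar_baz', None)
-        if bar_baz is not None:
-            # only act when the setup script actually supplied the argument
-            self.announce("bar_baz is %r" % (bar_baz,))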
-
-If more than one active distribution defines an entry point for the same
-``setup()`` argument, *all* of them will be called.  This allows multiple
-distutils extensions to define a common argument, as long as they agree on
-what values of that argument are valid.
-
-Also note that as with commands, it is not necessary to subclass or monkeypatch
-the distutils ``Distribution`` class in order to add your arguments; it is
-sufficient to define the entry points in your extension, as long as any setup
-script using your extension lists your project in its ``setup_requires``
-argument.
-
-
-Adding new EGG-INFO Files
--------------------------
-
-Some extensible applications or frameworks may want to allow third parties to
-develop plugins with application or framework-specific metadata included in
-the plugins' EGG-INFO directory, for easy access via the ``pkg_resources``
-metadata API.  The easiest way to allow this is to create a distutils extension
-to be used from the plugin projects' setup scripts (via ``setup_requires``)
-that defines a new setup keyword, and then uses that data to write an EGG-INFO
-file when the ``egg_info`` command is run.
-
-The ``egg_info`` command looks for extension points in an ``egg_info.writers``
-group, and calls them to write the files.  Here's a simple example of a
-distutils extension defining a setup argument ``foo_bar``, which is a list of
-lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any
-project that uses the argument::
-
-    setup(
-        # ...
-        entry_points = {
-            "distutils.setup_keywords": [
-                "foo_bar = setuptools.dist:assert_string_list",
-            ],
-            "egg_info.writers": [
-                "foo_bar.txt = setuptools.command.egg_info:write_arg",
-            ],
-        },
-    )
-
-This simple example makes use of two utility functions defined by setuptools
-for its own use: a routine to validate that a setup keyword is a sequence of
-strings, and another one that looks up a setup argument and writes it to
-a file.  Here's what the writer utility looks like::
-
-    def write_arg(cmd, basename, filename):
-        argname = os.path.splitext(basename)[0]
-        value = getattr(cmd.distribution, argname, None)
-        if value is not None:
-            value = '\n'.join(value)+'\n'
-        cmd.write_or_delete_file(argname, filename, value)
-
-As you can see, each ``egg_info.writers`` entry point must be a function taking
-three arguments: an ``egg_info`` command instance, the basename of the file to
-write (e.g. ``foo_bar.txt``), and the actual full filename that should be
-written to.
-
-In general, writer functions should honor the command object's ``dry_run``
-setting when writing files, and use the ``distutils.log`` object to do any
-console output.  The easiest way to conform to this requirement is to use
-the ``cmd`` object's ``write_file()``, ``delete_file()``, and
-``write_or_delete_file()`` methods exclusively for your file operations.  See
-those methods' docstrings for more details.
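-
-At runtime, a metadata file written this way can be read back through the
-``pkg_resources`` metadata API (a minimal sketch; the project name is
-hypothetical)::
-
-    import pkg_resources
-
-    dist = pkg_resources.get_distribution("SomePlugin")
-    if dist.has_metadata('foo_bar.txt'):
-        lines = list(dist.get_metadata_lines('foo_bar.txt'))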
-
-
-Adding Support for Other Revision Control Systems
--------------------------------------------------
-
-If you would like to create a plugin for ``setuptools`` to find files in other
-source control systems besides CVS and Subversion, you can do so by adding an
-entry point to the ``setuptools.file_finders`` group.  The entry point should
-be a function accepting a single directory name, and should yield
-all the filenames within that directory (and any subdirectories thereof) that
-are under revision control.
-
-For example, if you were going to create a plugin for a revision control system
-called "foobar", you would write a function something like this:
-
-.. code-block:: python
-
-    def find_files_for_foobar(dirname):
-        # loop here to yield paths that start with `dirname`; the empty
-        # list is only a placeholder to keep the sketch valid Python
-        return []
-
-And you would register it in a setup script using something like this::
-
-    entry_points = {
-        "setuptools.file_finders": [
-            "foobar = my_foobar_module:find_files_for_foobar"
-        ]
-    }
-
-Then, anyone who wants to use your plugin can simply install it, and their
-local setuptools installation will be able to find the necessary files.
-
-It is not necessary to distribute source control plugins with projects that
-simply use the other source control system, or to specify the plugins in
-``setup_requires``.  When you create a source distribution with the ``sdist``
-command, setuptools automatically records what files were found in the
-``SOURCES.txt`` file.  That way, recipients of source distributions don't need
-to have revision control at all.  However, if someone is working on a package
-by checking out with that system, they will need the same plugin(s) that the
-original author is using.
-
-A few important points for writing revision control file finders:
-
-* Your finder function MUST return relative paths, created by appending to the
-  passed-in directory name.  Absolute paths are NOT allowed, nor are relative
-  paths that reference a parent directory of the passed-in directory.
-
-* Your finder function MUST accept an empty string as the directory name,
-  meaning the current directory.  You MUST NOT convert this to a dot; just
-  yield relative paths.  So, yielding a subdirectory named ``some/dir`` under
-  the current directory should NOT be rendered as ``./some/dir`` or
-  ``/somewhere/some/dir``, but *always* as simply ``some/dir``.
-
-* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
-  with the absence of needed programs (i.e., ones belonging to the revision
-  control system itself).  It *may*, however, use ``distutils.log.warn()`` to
-  inform the user of the missing program(s).
-
-
-Subclassing ``Command``
------------------------
-
-Sorry, this section isn't written yet, and neither is a lot of what's below
-this point, except for the change log.  You might want to `subscribe to changes
-in this page <setuptools?action=subscribe>`_ to see when new documentation is
-added or updated.
-
-XXX
-
-
-Reusing ``setuptools`` Code
-===========================
-
-``distribute_setup``
---------------------
-
-XXX
-
-
-``setuptools.archive_util``
----------------------------
-
-XXX
-
-
-``setuptools.sandbox``
-----------------------
-
-XXX
-
-
-``setuptools.package_index``
-----------------------------
-
-XXX
-
-History
-=======
-
-0.6c9
- * Fixed a missing files problem when using Windows source distributions on
-   non-Windows platforms, due to distutils not handling manifest file line
-   endings correctly.
-
- * Updated Pyrex support to work with Pyrex 0.9.6 and higher.
-
- * Minor changes for Jython compatibility, including skipping tests that can't
-   work on Jython.
-
- * Fixed not installing eggs in ``install_requires`` if they were also used for
-   ``setup_requires`` or ``tests_require``.
-
- * Fixed not fetching eggs in ``install_requires`` when running tests.
-
- * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools
-   installations when called from a standalone ``setup.py``.
-
- * Added a warning if a namespace package is declared, but its parent package
-   is not also declared as a namespace.
-
- * Support Subversion 1.5
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice
-
- * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's
-   ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``.
-
- * Ensure that _full_name is set on all shared libs before extensions are
-   checked for shared lib usage.  (Fixes a bug in the experimental shared
-   library build support.)
-
- * Fix to allow unpacked eggs containing native libraries to fail more
-   gracefully under Google App Engine (with an ``ImportError`` loading the
-   C-based module, instead of getting a ``NameError``).
-
-0.6c7
- * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and
-   ``egg_info`` command failing on new, uncommitted SVN directories.
-
- * Fix import problems with nested namespace packages installed via
-   ``--root`` or ``--single-version-externally-managed``, due to the
-   parent package not having the child package as an attribute.
-
-0.6c6
- * Added ``--egg-path`` option to ``develop`` command, allowing you to force
-   ``.egg-link`` files to use relative paths (allowing them to be shared across
-   platforms on a networked drive).
-
- * Fix not building binary RPMs correctly.
-
- * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with
-   bash-compatible shells.
-
- * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there
-   was whitespace inside a quoted argument or at the end of the ``#!`` line
-   (a regression introduced in 0.6c4).
-
- * Fix ``test`` command possibly failing if an older version of the project
-   being tested was installed on ``sys.path`` ahead of the test source
-   directory.
-
- * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in
-   their names as packages.
-
-0.6c5
- * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg``
-   packages under Python versions less than 2.5.
-
- * Fix uploaded ``bdist_wininst`` packages being described as suitable for
-   "any" version by Python 2.5, even if a ``--target-version`` was specified.
-
-0.6c4
- * Overhauled Windows script wrapping to support ``bdist_wininst`` better.
-   Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or
-   ``#!pythonw.exe`` as the executable name (even when built on non-Windows
-   platforms!), and the wrappers will look for the executable in the script's
-   parent directory (which should find the right version of Python).
-
- * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or
-   ``bdist_wininst`` under Python 2.3 and 2.4.
-
- * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is
-   prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish
-   platforms.  (This is mainly so that setuptools itself can have a single-file
-   installer on Unix, without doing multiple downloads, dealing with firewalls,
-   etc.)
-
- * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files
-
- * Use cross-platform relative paths in ``easy-install.pth`` when doing
-   ``develop`` and the source directory is a subdirectory of the installation
-   target directory.
-
- * Fix a problem installing eggs with a system packaging tool if the project
-   contained an implicit namespace package; for example if the ``setup()``
-   listed a namespace package ``foo.bar`` without explicitly listing ``foo``
-   as a namespace package.
-
-0.6c3
- * Fixed breakages caused by Subversion 1.4's new "working copy" format
-
-0.6c2
- * The ``ez_setup`` module displays the conflicting version of setuptools (and
-   its installation location) when a script requests a version that's not
-   available.
-
- * Running ``setup.py develop`` on a setuptools-using project will now install
-   setuptools if needed, instead of only downloading the egg.
-
-0.6c1
- * Fixed ``AttributeError`` when trying to download a ``setup_requires``
-   dependency when a distribution lacks a ``dependency_links`` setting.
-
- * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so
-   as to play better with packaging tools that complain about zero-length
-   files.
-
- * Made ``setup.py develop`` respect the ``--no-deps`` option, which it
-   previously was ignoring.
-
- * Support ``extra_path`` option to ``setup()`` when ``install`` is run in
-   backward-compatibility mode.
-
- * Source distributions now always include a ``setup.cfg`` file that explicitly
-   sets ``egg_info`` options such that they produce an identical version number
-   to the source distribution's version number.  (Previously, the default
-   version number could be different due to the use of ``--tag-date``, or if
-   the version was overridden on the command line that built the source
-   distribution.)
-
-0.6b4
- * Fix ``register`` not obeying name/version set by ``egg_info`` command, if
-   ``egg_info`` wasn't explicitly run first on the same command line.
-
- * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info``
-   command, to allow suppressing tags configured in ``setup.cfg``.
-
- * Fixed redundant warnings about missing ``README`` file(s); it should now
-   appear only if you are actually a source distribution.
-
-0.6b3
- * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``.
-
- * Allow ``.py`` files found by the ``include_package_data`` option to be
-   automatically included.  Remove duplicate data file matches if both
-   ``include_package_data`` and ``package_data`` are used to refer to the same
-   files.
-
-0.6b1
- * Strip ``module`` from the end of compiled extension modules when computing
-   the name of a ``.py`` loader/wrapper.  (Python's import machinery ignores
-   this suffix when searching for an extension module.)
-
-0.6a11
- * Added ``test_loader`` keyword to support custom test loaders
-
- * Added ``setuptools.file_finders`` entry point group to allow implementing
-   revision control plugins.
-
- * Added ``--identity`` option to ``upload`` command.
-
- * Added ``dependency_links`` to allow specifying URLs for ``--find-links``.
-
- * Enhanced test loader to scan packages as well as modules, and call
-   ``additional_tests()`` if present to get non-unittest tests.
-
- * Support namespace packages in conjunction with system packagers, by omitting
-   the installation of any ``__init__.py`` files for namespace packages, and
-   adding a special ``.pth`` file to create a working package in
-   ``sys.modules``.
-
- * Made ``--single-version-externally-managed`` automatic when ``--root`` is
-   used, so that most system packagers won't require special support for
-   setuptools.
-
- * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or
-   other configuration files for their option defaults when installing, and
-   also made the install use ``--multi-version`` mode so that the project
-   directory doesn't need to support .pth files.
-
- * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading
-   it.  Previously, the file could be left open and the actual error would be
-   masked by problems trying to remove the open file on Windows systems.
-
-0.6a10
- * Fixed the ``develop`` command ignoring ``--find-links``.
-
-0.6a9
- * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to
-   create source distributions.  ``MANIFEST.in`` is still read and processed,
-   as are the standard defaults and pruning.  But the manifest is built inside
-   the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt
-   every time the ``egg_info`` command is run.
-
- * Added the ``include_package_data`` keyword to ``setup()``, allowing you to
-   automatically include any package data listed in revision control or
-   ``MANIFEST.in``
-
- * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to
-   trim back files included via the ``package_data`` and
-   ``include_package_data`` options.
-
- * Fixed ``--tag-svn-revision`` not working when run from a source
-   distribution.
-
- * Added warning for namespace packages with missing ``declare_namespace()``
-
- * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages
-   requiring ``nose`` to run unit tests can make this dependency optional
-   unless the ``test`` command is run.
-
- * Made all commands that use ``easy_install`` respect its configuration
-   options, as this was causing some problems with ``setup.py install``.
-
- * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so
-   that you can process a directory tree through a processing filter as if it
-   were a zipfile or tarfile.
-
- * Added an internal ``install_egg_info`` command to use as part of old-style
-   ``install`` operations, that installs an ``.egg-info`` directory with the
-   package.
-
- * Added a ``--single-version-externally-managed`` option to the ``install``
-   command so that you can more easily wrap a "flat" egg in a system package.
-
- * Enhanced ``bdist_rpm`` so that it installs single-version eggs that
-   don't rely on a ``.pth`` file.  The ``--no-egg`` option has been removed,
-   since all RPMs are now built in a more backwards-compatible format.
-
- * Support full roundtrip translation of eggs to and from ``bdist_wininst``
-   format.  Running ``bdist_wininst`` on a setuptools-based package wraps the
-   egg in an .exe that will safely install it as an egg (i.e., with metadata
-   and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
-   back into an ``.egg`` file or directory and install it as such.
-
-
-0.6a8
- * Fixed some problems building extensions when Pyrex was installed, especially
-   with Python 2.4 and/or packages using SWIG.
-
- * Made ``develop`` command accept all the same options as ``easy_install``,
-   and use the ``easy_install`` command's configuration settings as defaults.
-
- * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision
-   number from ``PKG-INFO`` in case it is being run on a source distribution of
-   a snapshot taken from a Subversion-based project.
-
- * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being
-   installed as data, adding them to ``native_libs.txt`` automatically.
-
- * Fixed some problems with fresh checkouts of projects that don't include
-   ``.egg-info/PKG-INFO`` under revision control and put the project's source
-   code directly in the project directory.  If such a package had any
-   requirements that get processed before the ``egg_info`` command can be run,
-   the setup scripts would fail with a "Missing 'Version:' header and/or
-   PKG-INFO file" error, because the egg runtime interpreted the unbuilt
-   metadata in a directory on ``sys.path`` (i.e. the current directory) as
-   being a corrupted egg.  Setuptools now monkeypatches the distribution
-   metadata cache to pretend that the egg has valid version information, until
-   it has a chance to make it actually be so (via the ``egg_info`` command).
-
-0.6a5
- * Fixed missing gui/cli .exe files in distribution.  Fixed bugs in tests.
-
-0.6a3
- * Added ``gui_scripts`` entry point group to allow installing GUI scripts
-   on Windows and other platforms.  (The special handling is only for Windows;
-   other platforms are treated the same as for ``console_scripts``.)
-
-0.6a2
- * Added ``console_scripts`` entry point group to allow installing scripts
-   without the need to create separate script files.  On Windows, console
-   scripts get an ``.exe`` wrapper so you can just type their name.  On other
-   platforms, the scripts are written without a file extension.
-
-0.6a1
- * Added support for building "old-style" RPMs that don't install an egg for
-   the target package, using a ``--no-egg`` option.
-
- * The ``build_ext`` command now works better when using the ``--inplace``
-   option and multiple Python versions.  It now makes sure that all extensions
-   match the current Python version, even if newer copies were built for a
-   different Python version.
-
- * The ``upload`` command no longer attaches an extra ``.zip`` when uploading
-   eggs, as PyPI now supports egg uploads without trickery.
-
- * The ``ez_setup`` script/module now displays a warning before downloading
-   the setuptools egg, and attempts to check the downloaded egg against an
-   internal MD5 checksum table.
-
- * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the
-   latest revision number; it was using the revision number of the directory
-   containing ``setup.py``, not the highest revision number in the project.
-
- * Added ``eager_resources`` setup argument
-
- * The ``sdist`` command now recognizes Subversion "deleted file" entries and
-   does not include them in source distributions.
-
- * ``setuptools`` now embeds itself more thoroughly into the distutils, so that
-   other distutils extensions (e.g. py2exe, py2app) will subclass setuptools'
-   versions of things, rather than the native distutils ones.
-
- * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``;
-   ``setup_requires`` allows you to automatically find and download packages
-   that are needed in order to *build* your project (as opposed to running it).
-
- * ``setuptools`` now finds its commands, ``setup()`` argument validators, and
-   metadata writers using entry points, so that they can be extended by
-   third-party packages.  See `Creating distutils Extensions`_ above for more
-   details.
-
- * The vestigial ``depends`` command has been removed.  It was never finished
-   or documented, and never would have worked without EasyInstall - which it
-   pre-dated and was never compatible with.
-
-0.5a12
- * The zip-safety scanner now checks for modules that might be used with
-   ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't
-   handle ``-m`` on zipped modules.
-
-0.5a11
- * Fix breakage of the "develop" command that was caused by the addition of
-   ``--always-unzip`` to the ``easy_install`` command.
-
-0.5a9
- * Include ``svn:externals`` directories in source distributions as well as
-   normal subversion-controlled files and directories.
-
- * Added ``exclude=patternlist`` option to ``setuptools.find_packages()``
-
- * Changed --tag-svn-revision to include an "r" in front of the revision number
-   for better readability.
-
- * Added ability to build eggs without including source files (except for any
-   scripts, of course), using the ``--exclude-source-files`` option to
-   ``bdist_egg``.
-
- * ``setup.py install`` now automatically detects when an "unmanaged" package
-   or module is going to be on ``sys.path`` ahead of a package being installed,
-   thereby preventing the newer version from being imported.  If this occurs,
-   a warning message is output to ``sys.stderr``, but installation proceeds
-   anyway.  The warning message informs the user what files or directories
-   need deleting, and advises them they can also use EasyInstall (with the
-   ``--delete-conflicting`` option) to do it automatically.
-
- * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata
-   directory that lists all top-level modules and packages in the distribution.
-   This is used by the ``easy_install`` command to find possibly-conflicting
-   "unmanaged" packages when installing the distribution.
-
- * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``.
-   Added package analysis to determine zip-safety if the ``zip_safe`` flag
-   is not given, and advise the author regarding what code might need changing.
-
- * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``.
-
-0.5a8
- * The "egg_info" command now always sets the distribution metadata to "safe"
-   forms of the distribution name and version, so that distribution files will
-   be generated with parseable names (i.e., ones that don't include '-' in the
-   name or version).  Also, this means that if you use the various ``--tag``
-   options of "egg_info", any distributions generated will use the tags in the
-   version, not just egg distributions.
-
- * Added support for defining command aliases in distutils configuration files,
-   under the "[aliases]" section.  To prevent recursion and to allow aliases to
-   call the command of the same name, a given alias can be expanded only once
-   per command-line invocation.  You can define new aliases with the "alias"
-   command, either for the local, global, or per-user configuration.
-
- * Added "rotate" command to delete old distribution files, given a set of
-   patterns to match and the number of files to keep.  (Keeps the most
-   recently-modified distribution files matching each pattern.)
-
- * Added "saveopts" command that saves all command-line options for the current
-   invocation to the local, global, or per-user configuration file.  Useful for
-   setting defaults without having to hand-edit a configuration file.
-
- * Added a "setopt" command that sets a single option in a specified distutils
-   configuration file.
-
-0.5a7
- * Added "upload" support for egg and source distributions, including a bug
-   fix for "upload" and a temporary workaround for lack of .egg support in
-   PyPI.
-
-0.5a6
- * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it
-   will include all files under revision control (CVS or Subversion) in the
-   current directory, and it will regenerate the list every time you create a
-   source distribution, not just when you tell it to.  This should make the
-   default "do what you mean" more often than the distutils' default behavior
-   did, while still retaining the old behavior in the presence of MANIFEST.in.
-
- * Fixed the "develop" command always updating .pth files, even if you
-   specified ``-n`` or ``--dry-run``.
-
- * Slightly changed the format of the generated version when you use
-   ``--tag-build`` on the "egg_info" command, so that you can make tagged
-   revisions compare *lower* than the version specified in setup.py (e.g. by
-   using ``--tag-build=dev``).
-
-0.5a5
- * Added ``develop`` command to ``setuptools``-based packages.  This command
-   installs an ``.egg-link`` pointing to the package's source directory, and
-   script wrappers that ``execfile()`` the source versions of the package's
-   scripts.  This lets you put your development checkout(s) on sys.path without
-   having to actually install them.  (To uninstall the link, use
-   ``setup.py develop --uninstall``.)
-
- * Added ``egg_info`` command to ``setuptools``-based packages.  This command
-   just creates or updates the "projectname.egg-info" directory, without
-   building an egg.  (It's used by the ``bdist_egg``, ``test``, and ``develop``
-   commands.)
-
- * Enhanced the ``test`` command so that it doesn't install the package, but
-   instead builds any C extensions in-place, updates the ``.egg-info``
-   metadata, adds the source directory to ``sys.path``, and runs the tests
-   directly on the source.  This avoids an "unmanaged" installation of the
-   package to ``site-packages`` or elsewhere.
-
- * Made ``easy_install`` a standard ``setuptools`` command, moving it from
-   the ``easy_install`` module to ``setuptools.command.easy_install``.  Note
-   that if you were importing or extending it, you must now change your imports
-   accordingly.  ``easy_install.py`` is still installed as a script, but not as
-   a module.
-
-0.5a4
- * Setup scripts using setuptools can now list their dependencies directly in
-   the setup.py file, without having to manually create a ``depends.txt`` file.
-   The ``install_requires`` and ``extras_require`` arguments to ``setup()``
-   are used to create a dependencies file automatically.  If you are manually
-   creating ``depends.txt`` right now, please switch to using these setup
-   arguments as soon as practical, because ``depends.txt`` support will be
-   removed in the 0.6 release cycle.  For documentation on the new arguments,
-   see the ``setuptools.dist.Distribution`` class.
-
- * Setup scripts using setuptools now always install using ``easy_install``
-   internally, for ease of uninstallation and upgrading.
-
-0.5a1
- * Added support for "self-installation" bootstrapping.  Packages can now
-   include ``ez_setup.py`` in their source distribution, and add the following
-   to their ``setup.py``, in order to automatically bootstrap installation of
-   setuptools as part of their setup process::
-
-    from ez_setup import use_setuptools
-    use_setuptools()
-
-    from setuptools import setup
-    # etc...
-
-0.4a2
- * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools
-   installation easier, and to allow distributions using setuptools to avoid
-   having to include setuptools in their source distribution.
-
- * All downloads are now managed by the ``PackageIndex`` class (which is now
-   subclassable and replaceable), so that embedders can more easily override
-   download logic, give download progress reports, etc.  The class has also
-   been moved to the new ``setuptools.package_index`` module.
-
- * The ``Installer`` class no longer handles downloading, manages a temporary
-   directory, or tracks the ``zip_ok`` option.  Downloading is now handled
-   by ``PackageIndex``, and ``Installer`` has become an ``easy_install``
-   command class based on ``setuptools.Command``.
-
- * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup
-   script in a directory sandbox, and a new ``setuptools.archive_util`` module
-   with an ``unpack_archive()`` API.  These were split out of EasyInstall to
-   allow reuse by other tools and applications.
-
- * ``setuptools.Command`` now supports reinitializing commands using keyword
-   arguments to set/reset options.  Also, ``Command`` subclasses can now set
-   their ``command_consumes_arguments`` attribute to ``True`` in order to
-   receive an ``args`` option containing the rest of the command line.
-
-0.3a2
- * Added new options to ``bdist_egg`` to allow tagging the egg's version number
-   with a subversion revision number, the current date, or an explicit tag
-   value.  Run ``setup.py bdist_egg --help`` to get more information.
-
- * Misc. bug fixes
-
-0.3a1
- * Initial release.
-
-Mailing List and Bug Tracker
-============================
-
-Please use the `distutils-sig mailing list`_ for questions and discussion about
-setuptools, and the `setuptools bug tracker`_ ONLY for issues you have
-confirmed via the list are actual bugs, and which you have reduced to a minimal
-set of steps to reproduce.
-
-.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
-.. _setuptools bug tracker: http://bugs.python.org/setuptools/
-
diff --git a/vendor/distribute-0.6.34/docs/using.txt b/vendor/distribute-0.6.34/docs/using.txt
deleted file mode 100644
index 192f1dc234a30dc1351c7f3cb7accd10e439ae72..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/docs/using.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-================================
-Using Distribute in your project
-================================
-
-To use Distribute in your project, the recommended way is to ship
-`distribute_setup.py` alongside your `setup.py` script and call
-it at the very beginning of `setup.py` like this::
-
-    from distribute_setup import use_setuptools
-    use_setuptools()
-
-Another way is to add ``Distribute`` in the ``install_requires`` option::
-
-    from setuptools import setup
-
-    setup(...,
-          install_requires=['distribute'],
-    )
-
-
-XXX to be finished
diff --git a/vendor/distribute-0.6.34/easy_install.py b/vendor/distribute-0.6.34/easy_install.py
deleted file mode 100644
index d87e984034b6e6e9eb456ebcb2b3f420c07a48bc..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/easy_install.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Run the EasyInstall command"""
-
-if __name__ == '__main__':
-    from setuptools.command.easy_install import main
-    main()
diff --git a/vendor/distribute-0.6.34/launcher.c b/vendor/distribute-0.6.34/launcher.c
deleted file mode 100644
index ea4c80b5c4e0103fb9fd9bb252463e969385972c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/launcher.c
+++ /dev/null
@@ -1,327 +0,0 @@
-/*  Setuptools Script Launcher for Windows
-
-    This is a stub executable for Windows that functions somewhat like
-    Effbot's "exemaker", in that it runs a script with the same name but
-    a .py extension, using information from a #! line.  It differs in that
-    it spawns the actual Python executable, rather than attempting to
-    hook into the Python DLL.  This means that the script will run with
-    sys.executable set to the Python executable, where exemaker ends up with
-    sys.executable pointing to itself.  (Which means it won't work if you try
-    to run another Python process using sys.executable.)
-
-    To build/rebuild with mingw32, do this in the setuptools project directory:
-
-       gcc -DGUI=0           -mno-cygwin -O -s -o setuptools/cli.exe launcher.c
-       gcc -DGUI=1 -mwindows -mno-cygwin -O -s -o setuptools/gui.exe launcher.c
-
-    It links to msvcrt.dll, but this shouldn't be a problem since it doesn't
-    actually run Python in the same process.  Note that using 'exec' instead
-    of 'spawn' doesn't work, because on Windows this leads to the Python
-    executable running in the *background*, attached to the same console
-    window, meaning you get a command prompt back *before* Python even finishes
-    starting.  So, we have to use spawnv() and wait for Python to exit before
-    continuing.  :(
-*/
-
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <windows.h>
-#include <tchar.h>
-#include <fcntl.h>
-
-int child_pid=0;
-
-int fail(char *format, char *data) {
-    /* Print error message to stderr and return 2 */
-    fprintf(stderr, format, data);
-    return 2;
-}
-
-char *quoted(char *data) {
-    int i, ln = strlen(data), nb;
-
-    /* We allocate twice as much space as needed to deal with the worst case
-       of having to escape everything. */
-    char *result = calloc(ln*2+3, sizeof(char));
-    char *presult = result;
-
-    *presult++ = '"';
-    for (nb=0, i=0; i < ln; i++)
-      {
-        if (data[i] == '\\')
-          nb += 1;
-        else if (data[i] == '"')
-          {
-            for (; nb > 0; nb--)
-              *presult++ = '\\';
-            *presult++ = '\\';
-          }
-        else
-          nb = 0;
-        *presult++ = data[i];
-      }
-
-    for (; nb > 0; nb--)        /* Deal w trailing slashes */
-      *presult++ = '\\';
-
-    *presult++ = '"';
-    *presult++ = 0;
-    return result;
-}
-
-
-
-
-
-
-
-
-
-
-char *loadable_exe(char *exename) {
-    /* HINSTANCE hPython;  DLL handle for python executable */
-    char *result;
-
-    /* hPython = LoadLibraryEx(exename, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
-    if (!hPython) return NULL; */
-
-    /* Return the absolute filename for spawnv */
-    result = calloc(MAX_PATH, sizeof(char));
-    strncpy(result, exename, MAX_PATH);
-    /*if (result) GetModuleFileNameA(hPython, result, MAX_PATH);
-
-    FreeLibrary(hPython); */
-    return result;
-}
-
-
-char *find_exe(char *exename, char *script) {
-    char drive[_MAX_DRIVE], dir[_MAX_DIR], fname[_MAX_FNAME], ext[_MAX_EXT];
-    char path[_MAX_PATH], c, *result;
-
-    /* convert slashes to backslashes for uniform search below */
-    result = exename;
-    while (c = *result++) if (c=='/') result[-1] = '\\';
-
-    _splitpath(exename, drive, dir, fname, ext);
-    if (drive[0] || dir[0]=='\\') {
-        return loadable_exe(exename);   /* absolute path, use directly */
-    }
-    /* Use the script's parent directory, which should be the Python home
-       (This should only be used for bdist_wininst-installed scripts, because
-        easy_install-ed scripts use the absolute path to python[w].exe
-    */
-    _splitpath(script, drive, dir, fname, ext);
-    result = dir + strlen(dir) -1;
-    if (*result == '\\') result--;
-    while (*result != '\\' && result>=dir) *result-- = 0;
-    _makepath(path, drive, dir, exename, NULL);
-    return loadable_exe(path);
-}
-
-
-char **parse_argv(char *cmdline, int *argc)
-{
-    /* Parse a command line in-place using MS C rules */
-
-    char **result = calloc(strlen(cmdline), sizeof(char *));
-    char *output = cmdline;
-    char c;
-    int nb = 0;
-    int iq = 0;
-    *argc = 0;
-
-    result[0] = output;
-    while (isspace(*cmdline)) cmdline++;   /* skip leading spaces */
-
-    do {
-        c = *cmdline++;
-        if (!c || (isspace(c) && !iq)) {
-            while (nb) {*output++ = '\\'; nb--; }
-            *output++ = 0;
-            result[++*argc] = output;
-            if (!c) return result;
-            while (isspace(*cmdline)) cmdline++;  /* skip leading spaces */
-            if (!*cmdline) return result;  /* avoid empty arg if trailing ws */
-            continue;
-        }
-        if (c == '\\')
-            ++nb;   /* count \'s */
-        else {
-            if (c == '"') {
-                if (!(nb & 1)) { iq = !iq; c = 0; }  /* skip " unless odd # of \ */
-                nb = nb >> 1;   /* cut \'s in half */
-            }
-            while (nb) {*output++ = '\\'; nb--; }
-            if (c) *output++ = c;
-        }
-    } while (1);
-}
-
-void pass_control_to_child(DWORD control_type) {
-    /*
-     * distribute-issue207
-     * passes the control event to child process (Python)
-     */
-    if (!child_pid) {
-        return;
-    }
-    GenerateConsoleCtrlEvent(child_pid,0);
-}
-
-BOOL control_handler(DWORD control_type) {
-    /* 
-     * distribute-issue207
-     * control event handler callback function
-     */
-    switch (control_type) {
-        case CTRL_C_EVENT:
-            pass_control_to_child(0);
-            break;
-    }
-    return TRUE;
-}
-
-int create_and_wait_for_subprocess(char* command) {
-    /*
-     * distribute-issue207
-     * launches child process (Python)
-     */
-    DWORD return_value = 0;
-    LPSTR commandline = command;
-    STARTUPINFOA s_info;
-    PROCESS_INFORMATION p_info;
-    ZeroMemory(&p_info, sizeof(p_info));
-    ZeroMemory(&s_info, sizeof(s_info));
-    s_info.cb = sizeof(STARTUPINFO);
-    // set up the control handler callback function
-    SetConsoleCtrlHandler((PHANDLER_ROUTINE) control_handler, TRUE);
-    if (!CreateProcessA(NULL, commandline, NULL, NULL, TRUE, 0, NULL, NULL, &s_info, &p_info)) {
-        fprintf(stderr, "failed to create process.\n");
-        return 0;
-    }   
-    child_pid = p_info.dwProcessId;
-    // wait for Python to exit
-    WaitForSingleObject(p_info.hProcess, INFINITE);
-    if (!GetExitCodeProcess(p_info.hProcess, &return_value)) {
-        fprintf(stderr, "failed to get exit code from process.\n");
-        return 0;
-    }
-    return return_value;
-}
-
-char* join_executable_and_args(char *executable, char **args, int argc)
-{
-    /*
-     * distribute-issue207
-     * CreateProcess needs a long string of the executable and command-line arguments,
-     * so we need to convert it from the args array that was built
-     */
-    int len,counter;
-    char* cmdline;
-    
-    len=strlen(executable)+2;
-    for (counter=1; counter<argc; counter++) {
-        len+=strlen(args[counter])+1;
-    }
-
-    cmdline = (char*)calloc(len, sizeof(char));
-    sprintf(cmdline, "%s", executable);
-    len=strlen(executable);
-    for (counter=1; counter<argc; counter++) {
-        sprintf(cmdline+len, " %s", args[counter]);
-        len+=strlen(args[counter])+1;
-    }
-    return cmdline;
-}
-
-int run(int argc, char **argv, int is_gui) {
-
-    char python[256];   /* python executable's filename*/
-    char *pyopt;        /* Python option */
-    char script[256];   /* the script's filename */
-
-    int scriptf;        /* file descriptor for script file */
-
-    char **newargs, **newargsp, **parsedargs; /* argument array for exec */
-    char *ptr, *end;    /* working pointers for string manipulation */
-    char *cmdline;
-    int i, parsedargc;              /* loop counter */
-
-    /* compute script name from our .exe name*/
-    GetModuleFileNameA(NULL, script, sizeof(script));
-    end = script + strlen(script);
-    while( end>script && *end != '.')
-        *end-- = '\0';
-    *end-- = '\0';
-    strcat(script, (GUI ? "-script.pyw" : "-script.py"));
-
-    /* figure out the target python executable */
-
-    scriptf = open(script, O_RDONLY);
-    if (scriptf == -1) {
-        return fail("Cannot open %s\n", script);
-    }
-    end = python + read(scriptf, python, sizeof(python));
-    close(scriptf);
-
-    ptr = python-1;
-    while(++ptr < end && *ptr && *ptr!='\n' && *ptr!='\r') {;}
-
-    *ptr-- = '\0';
-
-    if (strncmp(python, "#!", 2)) {
-        /* default to python.exe if no #! header */
-        strcpy(python, "#!python.exe");
-    }
-
-    parsedargs = parse_argv(python+2, &parsedargc);
-
-    /* Using spawnv() can fail strangely if you e.g. find the Cygwin
-       Python, so we'll make sure Windows can find and load it */
-
-    ptr = find_exe(parsedargs[0], script);
-    if (!ptr) {
-        return fail("Cannot find Python executable %s\n", parsedargs[0]);
-    }
-
-    /* printf("Python executable: %s\n", ptr); */
-
-    /* Argument array needs to be
-       parsedargc + argc, plus 1 for null sentinel */
-
-    newargs = (char **)calloc(parsedargc + argc + 1, sizeof(char *));
-    newargsp = newargs;
-
-    *newargsp++ = quoted(ptr);
-    for (i = 1; i<parsedargc; i++) *newargsp++ = quoted(parsedargs[i]);
-
-    *newargsp++ = quoted(script);
-    for (i = 1; i < argc; i++)     *newargsp++ = quoted(argv[i]);
-
-    *newargsp++ = NULL;
-
-    /* printf("args 0: %s\nargs 1: %s\n", newargs[0], newargs[1]); */
-
-    if (is_gui) {
-        /* Use exec, we don't need to wait for the GUI to finish */
-        execv(ptr, (const char * const *)(newargs));
-        return fail("Could not exec %s", ptr);   /* shouldn't get here! */
-    }
-
-    /*
-     * distribute-issue207: using CreateProcessA instead of spawnv
-     */
-    cmdline = join_executable_and_args(ptr, newargs, parsedargc + argc);
-    return create_and_wait_for_subprocess(cmdline);
-}
-
-int WINAPI WinMain(HINSTANCE hI, HINSTANCE hP, LPSTR lpCmd, int nShow) {
-    return run(__argc, __argv, GUI);
-}
-
-int main(int argc, char** argv) {
-    return run(argc, argv, GUI);
-}
-
diff --git a/vendor/distribute-0.6.34/pkg_resources.py b/vendor/distribute-0.6.34/pkg_resources.py
deleted file mode 100644
index 49aab6757f101eabf5626a2a72155f6f7976f0fb..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/pkg_resources.py
+++ /dev/null
@@ -1,2825 +0,0 @@
-"""Package resource API
---------------------
-
-A resource is a logical file contained within a package, or a logical
-subdirectory thereof.  The package resource API expects resource names
-to have their path parts separated with ``/``, *not* whatever the local
-path separator is.  Do not use os.path operations to manipulate resource
-names being passed into the API.
-
-The package resource API is designed to work with normal filesystem packages,
-.egg files, and unpacked .egg files.  It can also work in a limited way with
-.zip files and with custom PEP 302 loaders that support the ``get_data()``
-method.
-"""
-
-import sys, os, zipimport, time, re, imp, types
-from urlparse import urlparse, urlunparse
-
-try:
-    frozenset
-except NameError:
-    from sets import ImmutableSet as frozenset
-
-# capture these to bypass sandboxing
-from os import utime
-try:
-    from os import mkdir, rename, unlink
-    WRITE_SUPPORT = True
-except ImportError:
-    # no write support, probably under GAE
-    WRITE_SUPPORT = False
-
-from os import open as os_open
-from os.path import isdir, split
-
-# Avoid try/except due to potential problems with delayed import mechanisms.
-if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
-    import importlib._bootstrap as importlib_bootstrap
-else:
-    importlib_bootstrap = None
-
-# This marker is used to simplify the process that checks whether the
-# setuptools package was installed by the Setuptools project
-# or by the Distribute project, in case Setuptools creates
-# a distribution with the same version.
-#
-# The bootstrapping script, for instance, will check if this
-# attribute is present to decide whether to reinstall the package.
-_distribute = True
-
-def _bypass_ensure_directory(name, mode=0777):
-    # Sandbox-bypassing version of ensure_directory()
-    if not WRITE_SUPPORT:
-        raise IOError('"os.mkdir" not supported on this platform.')
-    dirname, filename = split(name)
-    if dirname and filename and not isdir(dirname):
-        _bypass_ensure_directory(dirname)
-        mkdir(dirname, mode)
-
-
-_state_vars = {}
-
-def _declare_state(vartype, **kw):
-    g = globals()
-    for name, val in kw.iteritems():
-        g[name] = val
-        _state_vars[name] = vartype
-
-def __getstate__():
-    state = {}
-    g = globals()
-    for k, v in _state_vars.iteritems():
-        state[k] = g['_sget_'+v](g[k])
-    return state
-
-def __setstate__(state):
-    g = globals()
-    for k, v in state.iteritems():
-        g['_sset_'+_state_vars[k]](k, g[k], v)
-    return state
-
-def _sget_dict(val):
-    return val.copy()
-
-def _sset_dict(key, ob, state):
-    ob.clear()
-    ob.update(state)
-
-def _sget_object(val):
-    return val.__getstate__()
-
-def _sset_object(key, ob, state):
-    ob.__setstate__(state)
-
-_sget_none = _sset_none = lambda *args: None
-
-
-
-def get_supported_platform():
-    """Return this platform's maximum compatible version.
-
-    distutils.util.get_platform() normally reports the minimum version
-    of Mac OS X that would be required to *use* extensions produced by
-    distutils.  But what we want when checking compatibility is to know the
-    version of Mac OS X that we are *running*.  To allow usage of packages that
-    explicitly require a newer version of Mac OS X, we must also know the
-    current version of the OS.
-
-    If this condition occurs for any other platform with a version in its
-    platform strings, this function should be extended accordingly.
-    """
-    plat = get_build_platform(); m = macosVersionString.match(plat)
-    if m is not None and sys.platform == "darwin":
-        try:
-            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
-        except ValueError:
-            pass    # not Mac OS X
-    return plat
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-__all__ = [
-    # Basic resource access and distribution/entry point discovery
-    'require', 'run_script', 'get_provider',  'get_distribution',
-    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
-    'resource_string', 'resource_stream', 'resource_filename',
-    'resource_listdir', 'resource_exists', 'resource_isdir',
-
-    # Environmental control
-    'declare_namespace', 'working_set', 'add_activation_listener',
-    'find_distributions', 'set_extraction_path', 'cleanup_resources',
-    'get_default_cache',
-
-    # Primary implementation classes
-    'Environment', 'WorkingSet', 'ResourceManager',
-    'Distribution', 'Requirement', 'EntryPoint',
-
-    # Exceptions
-    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
-    'ExtractionError',
-
-    # Parsing functions and string utilities
-    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
-    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
-    'safe_extra', 'to_filename',
-
-    # filesystem utilities
-    'ensure_directory', 'normalize_path',
-
-    # Distribution "precedence" constants
-    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
-
-    # "Provider" interfaces, implementations, and registration/lookup APIs
-    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
-    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
-    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
-    'register_finder', 'register_namespace_handler', 'register_loader_type',
-    'fixup_namespace_packages', 'get_importer',
-
-    # Deprecated/backward compatibility only
-    'run_main', 'AvailableDistributions',
-]
-class ResolutionError(Exception):
-    """Abstract base for dependency resolution errors"""
-    def __repr__(self):
-        return self.__class__.__name__+repr(self.args)
-
-class VersionConflict(ResolutionError):
-    """An already-installed version conflicts with the requested version"""
-
-class DistributionNotFound(ResolutionError):
-    """A requested distribution was not found"""
-
-class UnknownExtra(ResolutionError):
-    """Distribution doesn't have an "extra feature" of the given name"""
-_provider_factories = {}
-
-PY_MAJOR = sys.version[:3]
-EGG_DIST    = 3
-BINARY_DIST = 2
-SOURCE_DIST = 1
-CHECKOUT_DIST = 0
-DEVELOP_DIST = -1
-
-def register_loader_type(loader_type, provider_factory):
-    """Register `provider_factory` to make providers for `loader_type`
-
-    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
-    and `provider_factory` is a function that, passed a *module* object,
-    returns an ``IResourceProvider`` for that module.
-    """
-    _provider_factories[loader_type] = provider_factory
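-
-# Illustrative sketch (not part of the original module): an embedder with a
-# hypothetical loader class, say DbLoader, could plug in its own provider via
-#   register_loader_type(DbLoader, lambda module: DbResourceProvider(module))
-# so that get_provider() builds DbResourceProvider instances for modules whose
-# __loader__ is a DbLoader.  Both names here are assumptions, not real APIs.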
-
-def get_provider(moduleOrReq):
-    """Return an IResourceProvider for the named module or requirement"""
-    if isinstance(moduleOrReq,Requirement):
-        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
-    try:
-        module = sys.modules[moduleOrReq]
-    except KeyError:
-        __import__(moduleOrReq)
-        module = sys.modules[moduleOrReq]
-    loader = getattr(module, '__loader__', None)
-    return _find_adapter(_provider_factories, loader)(module)
-
-def _macosx_vers(_cache=[]):
-    if not _cache:
-        import platform
-        version = platform.mac_ver()[0]
-        # fallback for MacPorts
-        if version == '':
-            import plistlib
-            plist = '/System/Library/CoreServices/SystemVersion.plist'
-            if os.path.exists(plist):
-                if hasattr(plistlib, 'readPlist'):
-                    plist_content = plistlib.readPlist(plist)
-                    if 'ProductVersion' in plist_content:
-                        version = plist_content['ProductVersion']
-
-        _cache.append(version.split('.'))
-    return _cache[0]
-
-def _macosx_arch(machine):
-    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
-
-def get_build_platform():
-    """Return this platform's string for platform-specific distributions
-
-    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
-    needs some hacks for Linux and Mac OS X.
-    """
-    try:
-        from distutils.util import get_platform
-    except ImportError:
-        from sysconfig import get_platform
-
-    plat = get_platform()
-    if sys.platform == "darwin" and not plat.startswith('macosx-'):
-        try:
-            version = _macosx_vers()
-            machine = os.uname()[4].replace(" ", "_")
-            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
-                _macosx_arch(machine))
-        except ValueError:
-            # if someone is running a non-Mac darwin system, this will fall
-            # through to the default implementation
-            pass
-    return plat
-
-macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
-darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
-get_platform = get_build_platform   # XXX backward compat
-
-def compatible_platforms(provided,required):
-    """Can code for the `provided` platform run on the `required` platform?
-
-    Returns true if either platform is ``None``, or the platforms are equal.
-
-    XXX Needs compatibility checks for Linux and other unixy OSes.
-    """
-    if provided is None or required is None or provided==required:
-        return True     # easy case
-
-    # Mac OS X special cases
-    reqMac = macosVersionString.match(required)
-    if reqMac:
-        provMac = macosVersionString.match(provided)
-
-        # is this a Mac package?
-        if not provMac:
-            # this is backwards compatibility for packages built before
-            # setuptools 0.6. All packages built after this point will
-            # use the new macosx designation.
-            provDarwin = darwinVersionString.match(provided)
-            if provDarwin:
-                dversion = int(provDarwin.group(1))
-                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
-                if dversion == 7 and macosversion >= "10.3" or \
-                    dversion == 8 and macosversion >= "10.4":
-
-                    #import warnings
-                    #warnings.warn("Mac eggs should be rebuilt to "
-                    #    "use the macosx designation instead of darwin.",
-                    #    category=DeprecationWarning)
-                    return True
-            return False    # egg isn't macosx or legacy darwin
-
-        # are they the same major version and machine type?
-        if provMac.group(1) != reqMac.group(1) or \
-            provMac.group(3) != reqMac.group(3):
-            return False
-
-
-
-        # is the required OS major update >= the provided one?
-        if int(provMac.group(2)) > int(reqMac.group(2)):
-            return False
-
-        return True
-
-    # XXX Linux and other platforms' special cases should go here
-    return False
-
-
-def run_script(dist_spec, script_name):
-    """Locate distribution `dist_spec` and run its `script_name` script"""
-    ns = sys._getframe(1).f_globals
-    name = ns['__name__']
-    ns.clear()
-    ns['__name__'] = name
-    require(dist_spec)[0].run_script(script_name, ns)
-
-run_main = run_script   # backward compatibility
-
-def get_distribution(dist):
-    """Return a current distribution object for a Requirement or string"""
-    if isinstance(dist,basestring): dist = Requirement.parse(dist)
-    if isinstance(dist,Requirement): dist = get_provider(dist)
-    if not isinstance(dist,Distribution):
-        raise TypeError("Expected string, Requirement, or Distribution", dist)
-    return dist
-
-def load_entry_point(dist, group, name):
-    """Return `name` entry point of `group` for `dist` or raise ImportError"""
-    return get_distribution(dist).load_entry_point(group, name)
-
-def get_entry_map(dist, group=None):
-    """Return the entry point map for `group`, or the full entry map"""
-    return get_distribution(dist).get_entry_map(group)
-
-def get_entry_info(dist, group, name):
-    """Return the EntryPoint object for `group`+`name`, or ``None``"""
-    return get_distribution(dist).get_entry_info(group, name)
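-
-# Illustrative only (not part of the original module): a console-script style
-# lookup might look like
-#   main = load_entry_point('example-dist', 'console_scripts', 'example-cmd')
-# where 'example-dist' and 'example-cmd' are hypothetical names.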
-
-
-class IMetadataProvider:
-
-    def has_metadata(name):
-        """Does the package's distribution contain the named metadata?"""
-
-    def get_metadata(name):
-        """The named metadata resource as a string"""
-
-    def get_metadata_lines(name):
-        """Yield named metadata resource as list of non-blank non-comment lines
-
-       Leading and trailing whitespace is stripped from each line, and lines
-       with ``#`` as the first non-blank character are omitted."""
-
-    def metadata_isdir(name):
-        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
-
-    def metadata_listdir(name):
-        """List of metadata names in the directory (like ``os.listdir()``)"""
-
-    def run_script(script_name, namespace):
-        """Execute the named script in the supplied namespace dictionary"""
-
-
-
-
-
-
-
-
-
-
-class IResourceProvider(IMetadataProvider):
-    """An object that provides access to package resources"""
-
-    def get_resource_filename(manager, resource_name):
-        """Return a true filesystem path for `resource_name`
-
-        `manager` must be an ``IResourceManager``"""
-
-    def get_resource_stream(manager, resource_name):
-        """Return a readable file-like object for `resource_name`
-
-        `manager` must be an ``IResourceManager``"""
-
-    def get_resource_string(manager, resource_name):
-        """Return a string containing the contents of `resource_name`
-
-        `manager` must be an ``IResourceManager``"""
-
-    def has_resource(resource_name):
-        """Does the package contain the named resource?"""
-
-    def resource_isdir(resource_name):
-        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
-
-    def resource_listdir(resource_name):
-        """List of resource names in the directory (like ``os.listdir()``)"""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class WorkingSet(object):
-    """A collection of active distributions on sys.path (or a similar list)"""
-
-    def __init__(self, entries=None):
-        """Create working set from list of path entries (default=sys.path)"""
-        self.entries = []
-        self.entry_keys = {}
-        self.by_key = {}
-        self.callbacks = []
-
-        if entries is None:
-            entries = sys.path
-
-        for entry in entries:
-            self.add_entry(entry)
-
-
-    def add_entry(self, entry):
-        """Add a path item to ``.entries``, finding any distributions on it
-
-        ``find_distributions(entry,True)`` is used to find distributions
-        corresponding to the path entry, and they are added.  `entry` is
-        always appended to ``.entries``, even if it is already present.
-        (This is because ``sys.path`` can contain the same value more than
-        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
-        equal ``sys.path``.)
-        """
-        self.entry_keys.setdefault(entry, [])
-        self.entries.append(entry)
-        for dist in find_distributions(entry, True):
-            self.add(dist, entry, False)
-
-
-    def __contains__(self,dist):
-        """True if `dist` is the active distribution for its project"""
-        return self.by_key.get(dist.key) == dist
-
-
-
-
-
-    def find(self, req):
-        """Find a distribution matching requirement `req`
-
-        If there is an active distribution for the requested project, this
-        returns it as long as it meets the version requirement specified by
-        `req`.  But, if there is an active distribution for the project and it
-        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
-        If there is no active distribution for the requested project, ``None``
-        is returned.
-        """
-        dist = self.by_key.get(req.key)
-        if dist is not None and dist not in req:
-            raise VersionConflict(dist,req)     # XXX add more info
-        else:
-            return dist
-
-    def iter_entry_points(self, group, name=None):
-        """Yield entry point objects from `group` matching `name`
-
-        If `name` is None, yields all entry points in `group` from all
-        distributions in the working set, otherwise only ones matching
-        both `group` and `name` are yielded (in distribution order).
-        """
-        for dist in self:
-            entries = dist.get_entry_map(group)
-            if name is None:
-                for ep in entries.values():
-                    yield ep
-            elif name in entries:
-                yield entries[name]
-
-    def run_script(self, requires, script_name):
-        """Locate distribution for `requires` and run `script_name` script"""
-        ns = sys._getframe(1).f_globals
-        name = ns['__name__']
-        ns.clear()
-        ns['__name__'] = name
-        self.require(requires)[0].run_script(script_name, ns)
-
-
-
-    def __iter__(self):
-        """Yield distributions for non-duplicate projects in the working set
-
-        The yield order is the order in which the items' path entries were
-        added to the working set.
-        """
-        seen = {}
-        for item in self.entries:
-            if item not in self.entry_keys:
-                # workaround a cache issue
-                continue
-
-            for key in self.entry_keys[item]:
-                if key not in seen:
-                    seen[key]=1
-                    yield self.by_key[key]
-
-    def add(self, dist, entry=None, insert=True):
-        """Add `dist` to working set, associated with `entry`
-
-        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
-        On exit from this routine, `entry` is added to the end of the working
-        set's ``.entries`` (if it wasn't already present).
-
-        `dist` is only added to the working set if it's for a project that
-        doesn't already have a distribution in the set.  If it's added, any
-        callbacks registered with the ``subscribe()`` method will be called.
-        """
-        if insert:
-            dist.insert_on(self.entries, entry)
-
-        if entry is None:
-            entry = dist.location
-        keys = self.entry_keys.setdefault(entry,[])
-        keys2 = self.entry_keys.setdefault(dist.location,[])
-        if dist.key in self.by_key:
-            return      # ignore hidden distros
-
-        self.by_key[dist.key] = dist
-        if dist.key not in keys:
-            keys.append(dist.key)
-        if dist.key not in keys2:
-            keys2.append(dist.key)
-        self._added_new(dist)
-
-    def resolve(self, requirements, env=None, installer=None, replacement=True):
-        """List all distributions needed to (recursively) meet `requirements`
-
-        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
-        if supplied, should be an ``Environment`` instance.  If
-        not supplied, it defaults to all distributions available within any
-        entry or distribution in the working set.  `installer`, if supplied,
-        will be invoked with each requirement that cannot be met by an
-        already-installed distribution; it should return a ``Distribution`` or
-        ``None``.
-        """
-
-        requirements = list(requirements)[::-1]  # set up the stack
-        processed = {}  # set of processed requirements
-        best = {}  # key -> dist
-        to_activate = []
-
-        while requirements:
-            req = requirements.pop(0)   # process dependencies breadth-first
-            if _override_setuptools(req) and replacement:
-                req = Requirement.parse('distribute')
-
-            if req in processed:
-                # Ignore cyclic or redundant dependencies
-                continue
-            dist = best.get(req.key)
-            if dist is None:
-                # Find the best distribution and add it to the map
-                dist = self.by_key.get(req.key)
-                if dist is None:
-                    if env is None:
-                        env = Environment(self.entries)
-                    dist = best[req.key] = env.best_match(req, self, installer)
-                    if dist is None:
-                        #msg = ("The '%s' distribution was not found on this "
-                        #       "system, and is required by this application.")
-                        #raise DistributionNotFound(msg % req)
-
-                        # unfortunately, zc.buildout uses a str(err)
-                        # to get the name of the distribution here..
-                        raise DistributionNotFound(req)
-                to_activate.append(dist)
-            if dist not in req:
-                # Oops, the "best" so far conflicts with a dependency
-                raise VersionConflict(dist,req) # XXX put more info here
-            requirements.extend(dist.requires(req.extras)[::-1])
-            processed[req] = True
-
-        return to_activate    # return list of distros to activate
-
-    def find_plugins(self,
-        plugin_env, full_env=None, installer=None, fallback=True
-    ):
-        """Find all activatable distributions in `plugin_env`
-
-        Example usage::
-
-            distributions, errors = working_set.find_plugins(
-                Environment(plugin_dirlist)
-            )
-            map(working_set.add, distributions)  # add plugins+libs to sys.path
-            print 'Could not load', errors        # display errors
-
-        The `plugin_env` should be an ``Environment`` instance that contains
-        only distributions that are in the project's "plugin directory" or
-        directories. The `full_env`, if supplied, should be an ``Environment``
-        that contains all currently-available distributions.  If `full_env` is
-        not supplied, one is created automatically from the ``WorkingSet`` this
-        method is called on, which will typically mean that every directory on
-        ``sys.path`` will be scanned for distributions.
-
-        `installer` is a standard installer callback as used by the
-        ``resolve()`` method. The `fallback` flag indicates whether we should
-        attempt to resolve older versions of a plugin if the newest version
-        cannot be resolved.
-
-        This method returns a 2-tuple: (`distributions`, `error_info`), where
-        `distributions` is a list of the distributions found in `plugin_env`
-        that were loadable, along with any other distributions that are needed
-        to resolve their dependencies.  `error_info` is a dictionary mapping
-        unloadable plugin distributions to an exception instance describing the
-        error that occurred. Usually this will be a ``DistributionNotFound`` or
-        ``VersionConflict`` instance.
-        """
-
-        plugin_projects = list(plugin_env)
-        plugin_projects.sort()  # scan project names in alphabetic order
-
-        error_info = {}
-        distributions = {}
-
-        if full_env is None:
-            env = Environment(self.entries)
-            env += plugin_env
-        else:
-            env = full_env + plugin_env
-
-        shadow_set = self.__class__([])
-        map(shadow_set.add, self)   # put all our entries in shadow_set
-
-        for project_name in plugin_projects:
-
-            for dist in plugin_env[project_name]:
-
-                req = [dist.as_requirement()]
-
-                try:
-                    resolvees = shadow_set.resolve(req, env, installer)
-
-                except ResolutionError,v:
-                    error_info[dist] = v    # save error info
-                    if fallback:
-                        continue    # try the next older version of project
-                    else:
-                        break       # give up on this project, keep going
-
-                else:
-                    map(shadow_set.add, resolvees)
-                    distributions.update(dict.fromkeys(resolvees))
-
-                    # success, no need to try any more versions of this project
-                    break
-
-        distributions = list(distributions)
-        distributions.sort()
-
-        return distributions, error_info
-
-
-
-
-
-    def require(self, *requirements):
-        """Ensure that distributions matching `requirements` are activated
-
-        `requirements` must be a string or a (possibly-nested) sequence
-        thereof, specifying the distributions and versions required.  The
-        return value is a sequence of the distributions that needed to be
-        activated to fulfill the requirements; all relevant distributions are
-        included, even if they were already activated in this working set.
-        """
-
-        needed = self.resolve(parse_requirements(requirements))
-
-        for dist in needed:
-            self.add(dist)
-
-        return needed
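-
-    # Illustrative usage (not part of the original source): a typical caller
-    # activates a requirement on the module-level working set, e.g.
-    #   pkg_resources.working_set.require('distribute>=0.6')
-    # which resolves the requirement and activates the matching distributions
-    # so they become importable.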
-
-
-    def subscribe(self, callback):
-        """Invoke `callback` for all distributions (including existing ones)"""
-        if callback in self.callbacks:
-            return
-        self.callbacks.append(callback)
-        for dist in self:
-            callback(dist)
-
-
-    def _added_new(self, dist):
-        for callback in self.callbacks:
-            callback(dist)
-
-    def __getstate__(self):
-        return (self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
-                self.callbacks[:])
-
-    def __setstate__(self, (entries, keys, by_key, callbacks)):
-        self.entries = entries[:]
-        self.entry_keys = keys.copy()
-        self.by_key = by_key.copy()
-        self.callbacks = callbacks[:]
-
-
-
-
-class Environment(object):
-    """Searchable snapshot of distributions on a search path"""
-
-    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
-        """Snapshot distributions available on a search path
-
-        Any distributions found on `search_path` are added to the environment.
-        `search_path` should be a sequence of ``sys.path`` items.  If not
-        supplied, ``sys.path`` is used.
-
-        `platform` is an optional string specifying the name of the platform
-        that platform-specific distributions must be compatible with.  If
-        unspecified, it defaults to the current platform.  `python` is an
-        optional string naming the desired version of Python (e.g. ``'2.4'``);
-        it defaults to the current version.
-
-        You may explicitly set `platform` (and/or `python`) to ``None`` if you
-        wish to map *all* distributions, not just those compatible with the
-        running platform or Python version.
-        """
-        self._distmap = {}
-        self._cache = {}
-        self.platform = platform
-        self.python = python
-        self.scan(search_path)
-
-    def can_add(self, dist):
-        """Is distribution `dist` acceptable for this environment?
-
-        The distribution must match the platform and python version
-        requirements specified when this environment was created, or False
-        is returned.
-        """
-        return (self.python is None or dist.py_version is None
-            or dist.py_version==self.python) \
-           and compatible_platforms(dist.platform,self.platform)
-
-    def remove(self, dist):
-        """Remove `dist` from the environment"""
-        self._distmap[dist.key].remove(dist)
-
-    def scan(self, search_path=None):
-        """Scan `search_path` for distributions usable in this environment
-
-        Any distributions found are added to the environment.
-        `search_path` should be a sequence of ``sys.path`` items.  If not
-        supplied, ``sys.path`` is used.  Only distributions conforming to
-        the platform/python version defined at initialization are added.
-        """
-        if search_path is None:
-            search_path = sys.path
-
-        for item in search_path:
-            for dist in find_distributions(item):
-                self.add(dist)
-
-    def __getitem__(self,project_name):
-        """Return a newest-to-oldest list of distributions for `project_name`
-        """
-        try:
-            return self._cache[project_name]
-        except KeyError:
-            project_name = project_name.lower()
-            if project_name not in self._distmap:
-                return []
-
-        if project_name not in self._cache:
-            dists = self._cache[project_name] = self._distmap[project_name]
-            _sort_dists(dists)
-
-        return self._cache[project_name]
-
-    def add(self,dist):
-        """Add `dist` if we ``can_add()`` it and it isn't already added"""
-        if self.can_add(dist) and dist.has_version():
-            dists = self._distmap.setdefault(dist.key,[])
-            if dist not in dists:
-                dists.append(dist)
-                if dist.key in self._cache:
-                    _sort_dists(self._cache[dist.key])
-
-
-    def best_match(self, req, working_set, installer=None):
-        """Find distribution best matching `req` and usable on `working_set`
-
-        This calls the ``find(req)`` method of the `working_set` to see if a
-        suitable distribution is already active.  (This may raise
-        ``VersionConflict`` if an unsuitable version of the project is already
-        active in the specified `working_set`.)  If a suitable distribution
-        isn't active, this method returns the newest distribution in the
-        environment that meets the ``Requirement`` in `req`.  If no suitable
-        distribution is found, and `installer` is supplied, then the result of
-        calling the environment's ``obtain(req, installer)`` method will be
-        returned.
-        """
-        dist = working_set.find(req)
-        if dist is not None:
-            return dist
-        for dist in self[req.key]:
-            if dist in req:
-                return dist
-        return self.obtain(req, installer) # try and download/install
-
-    def obtain(self, requirement, installer=None):
-        """Obtain a distribution matching `requirement` (e.g. via download)
-
-        Obtain a distro that matches requirement (e.g. via download).  In the
-        base ``Environment`` class, this routine just returns
-        ``installer(requirement)``, unless `installer` is None, in which case
-        None is returned instead.  This method is a hook that allows subclasses
-        to attempt other ways of obtaining a distribution before falling back
-        to the `installer` argument."""
-        if installer is not None:
-            return installer(requirement)
-
-    def __iter__(self):
-        """Yield the unique project names of the available distributions"""
-        for key in self._distmap.keys():
-            if self[key]: yield key
-
-
-
-
-    def __iadd__(self, other):
-        """In-place addition of a distribution or environment"""
-        if isinstance(other,Distribution):
-            self.add(other)
-        elif isinstance(other,Environment):
-            for project in other:
-                for dist in other[project]:
-                    self.add(dist)
-        else:
-            raise TypeError("Can't add %r to environment" % (other,))
-        return self
-
-    def __add__(self, other):
-        """Add an environment or distribution to an environment"""
-        new = self.__class__([], platform=None, python=None)
-        for env in self, other:
-            new += env
-        return new
-
-
-AvailableDistributions = Environment    # XXX backward compatibility
-
-
-class ExtractionError(RuntimeError):
-    """An error occurred extracting a resource
-
-    The following attributes are available from instances of this exception:
-
-    manager
-        The resource manager that raised this exception
-
-    cache_path
-        The base directory for resource extraction
-
-    original_error
-        The exception instance that caused extraction to fail
-    """
-
-
-
-
-class ResourceManager:
-    """Manage resource extraction and packages"""
-    extraction_path = None
-
-    def __init__(self):
-        self.cached_files = {}
-
-    def resource_exists(self, package_or_requirement, resource_name):
-        """Does the named resource exist?"""
-        return get_provider(package_or_requirement).has_resource(resource_name)
-
-    def resource_isdir(self, package_or_requirement, resource_name):
-        """Is the named resource an existing directory?"""
-        return get_provider(package_or_requirement).resource_isdir(
-            resource_name
-        )
-
-    def resource_filename(self, package_or_requirement, resource_name):
-        """Return a true filesystem path for specified resource"""
-        return get_provider(package_or_requirement).get_resource_filename(
-            self, resource_name
-        )
-
-    def resource_stream(self, package_or_requirement, resource_name):
-        """Return a readable file-like object for specified resource"""
-        return get_provider(package_or_requirement).get_resource_stream(
-            self, resource_name
-        )
-
-    def resource_string(self, package_or_requirement, resource_name):
-        """Return specified resource as a string"""
-        return get_provider(package_or_requirement).get_resource_string(
-            self, resource_name
-        )
-
-    def resource_listdir(self, package_or_requirement, resource_name):
-        """List the contents of the named resource directory"""
-        return get_provider(package_or_requirement).resource_listdir(
-            resource_name
-        )
-
-    def extraction_error(self):
-        """Give an error message for problems extracting file(s)"""
-
-        old_exc = sys.exc_info()[1]
-        cache_path = self.extraction_path or get_default_cache()
-
-        err = ExtractionError("""Can't extract file(s) to egg cache
-
-The following error occurred while trying to extract file(s) to the Python egg
-cache:
-
-  %s
-
-The Python egg cache directory is currently set to:
-
-  %s
-
-Perhaps your account does not have write access to this directory?  You can
-change the cache directory by setting the PYTHON_EGG_CACHE environment
-variable to point to an accessible directory.
-"""         % (old_exc, cache_path)
-        )
-        err.manager        = self
-        err.cache_path     = cache_path
-        err.original_error = old_exc
-        raise err
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def get_cache_path(self, archive_name, names=()):
-        """Return absolute location in cache for `archive_name` and `names`
-
-        The parent directory of the resulting path will be created if it does
-        not already exist.  `archive_name` should be the base filename of the
-        enclosing egg (which may not be the name of the enclosing zipfile!),
-        including its ".egg" extension.  `names`, if provided, should be a
-        sequence of path name parts "under" the egg's extraction location.
-
-        This method should only be called by resource providers that need to
-        obtain an extraction location, and only for names they intend to
-        extract, as it tracks the generated names for possible cleanup later.
-        """
-        extract_path = self.extraction_path or get_default_cache()
-        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
-        try:
-            _bypass_ensure_directory(target_path)
-        except:
-            self.extraction_error()
-
-        self.cached_files[target_path] = 1
-        return target_path
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def postprocess(self, tempname, filename):
-        """Perform any platform-specific postprocessing of `tempname`
-
-        This is where Mac header rewrites should be done; other platforms don't
-        have anything special they should do.
-
-        Resource providers should call this method ONLY after successfully
-        extracting a compressed resource.  They must NOT call it on resources
-        that are already in the filesystem.
-
-        `tempname` is the current (temporary) name of the file, and `filename`
-        is the name it will be renamed to by the caller after this routine
-        returns.
-        """
-
-        if os.name == 'posix':
-            # Make the resource executable
-            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
-            os.chmod(tempname, mode)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def set_extraction_path(self, path):
-        """Set the base path where resources will be extracted to, if needed.
-
-        If you do not call this routine before any extractions take place, the
-        path defaults to the return value of ``get_default_cache()``.  (Which
-        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
-        platform-specific fallbacks.  See that routine's documentation for more
-        details.)
-
-        Resources are extracted to subdirectories of this path based upon
-        information given by the ``IResourceProvider``.  You may set this to a
-        temporary directory, but then you must call ``cleanup_resources()`` to
-        delete the extracted files when done.  There is no guarantee that
-        ``cleanup_resources()`` will be able to remove all extracted files.
-
-        (Note: you may not change the extraction path for a given resource
-        manager once resources have been extracted, unless you first call
-        ``cleanup_resources()``.)
-        """
-        if self.cached_files:
-            raise ValueError(
-                "Can't change extraction path, files already extracted"
-            )
-
-        self.extraction_path = path
-
-    def cleanup_resources(self, force=False):
-        """
-        Delete all extracted resource files and directories, returning a list
-        of the file and directory names that could not be successfully removed.
-        This function does not have any concurrency protection, so it should
-        generally only be called when the extraction path is a temporary
-        directory exclusive to a single process.  This method is not
-        automatically called; you must call it explicitly or register it as an
-        ``atexit`` function if you wish to ensure cleanup of a temporary
-        directory used for extractions.
-        """
-        # XXX
-
-
-
-def get_default_cache():
-    """Determine the default cache location
-
-    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
-    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
-    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
-    """
-    try:
-        return os.environ['PYTHON_EGG_CACHE']
-    except KeyError:
-        pass
-
-    if os.name!='nt':
-        return os.path.expanduser('~/.python-eggs')
-
-    app_data = 'Application Data'   # XXX this may be locale-specific!
-    app_homes = [
-        (('APPDATA',), None),       # best option, should be locale-safe
-        (('USERPROFILE',), app_data),
-        (('HOMEDRIVE','HOMEPATH'), app_data),
-        (('HOMEPATH',), app_data),
-        (('HOME',), None),
-        (('WINDIR',), app_data),    # 95/98/ME
-    ]
-
-    for keys, subdir in app_homes:
-        dirname = ''
-        for key in keys:
-            if key in os.environ:
-                dirname = os.path.join(dirname, os.environ[key])
-            else:
-                break
-        else:
-            if subdir:
-                dirname = os.path.join(dirname,subdir)
-            return os.path.join(dirname, 'Python-Eggs')
-    else:
-        raise RuntimeError(
-            "Please set the PYTHON_EGG_CACHE enviroment variable"
-        )
-
-def safe_name(name):
-    """Convert an arbitrary string to a standard distribution name
-
-    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
-    """
-    return re.sub('[^A-Za-z0-9.]+', '-', name)
-
-
-def safe_version(version):
-    """Convert an arbitrary string to a standard version string
-
-    Spaces become dots, and all other non-alphanumeric characters become
-    dashes, with runs of multiple dashes condensed to a single dash.
-    """
-    version = version.replace(' ','.')
-    return re.sub('[^A-Za-z0-9.]+', '-', version)
-
-
-def safe_extra(extra):
-    """Convert an arbitrary string to a standard 'extra' name
-
-    Any runs of non-alphanumeric characters are replaced with a single '_',
-    and the result is always lowercased.
-    """
-    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
-
-
-def to_filename(name):
-    """Convert a project or version name to its filename-escaped form
-
-    Any '-' characters are currently replaced with '_'.
-    """
-    return name.replace('-','_')
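-
-# Illustrative examples (not part of the original source):
-#   safe_name('foo bar')        -> 'foo-bar'
-#   safe_version('1.0 beta 2')  -> '1.0.beta.2'
-#   safe_extra('Foo Bar')       -> 'foo_bar'
-#   to_filename('my-project')   -> 'my_project'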
-
-
-
-
-
-
-
-
-class NullProvider:
-    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
-
-    egg_name = None
-    egg_info = None
-    loader = None
-
-    def __init__(self, module):
-        self.loader = getattr(module, '__loader__', None)
-        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
-
-    def get_resource_filename(self, manager, resource_name):
-        return self._fn(self.module_path, resource_name)
-
-    def get_resource_stream(self, manager, resource_name):
-        return StringIO(self.get_resource_string(manager, resource_name))
-
-    def get_resource_string(self, manager, resource_name):
-        return self._get(self._fn(self.module_path, resource_name))
-
-    def has_resource(self, resource_name):
-        return self._has(self._fn(self.module_path, resource_name))
-
-    def has_metadata(self, name):
-        return self.egg_info and self._has(self._fn(self.egg_info,name))
-
-    if sys.version_info <= (3,):
-        def get_metadata(self, name):
-            if not self.egg_info:
-                return ""
-            return self._get(self._fn(self.egg_info,name))
-    else:
-        def get_metadata(self, name):
-            if not self.egg_info:
-                return ""
-            return self._get(self._fn(self.egg_info,name)).decode("utf-8")
-
-    def get_metadata_lines(self, name):
-        return yield_lines(self.get_metadata(name))
-
-    def resource_isdir(self,resource_name):
-        return self._isdir(self._fn(self.module_path, resource_name))
-
-    def metadata_isdir(self,name):
-        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
-
-
-    def resource_listdir(self,resource_name):
-        return self._listdir(self._fn(self.module_path,resource_name))
-
-    def metadata_listdir(self,name):
-        if self.egg_info:
-            return self._listdir(self._fn(self.egg_info,name))
-        return []
-
-    def run_script(self,script_name,namespace):
-        script = 'scripts/'+script_name
-        if not self.has_metadata(script):
-            raise ResolutionError("No script named %r" % script_name)
-        script_text = self.get_metadata(script).replace('\r\n','\n')
-        script_text = script_text.replace('\r','\n')
-        script_filename = self._fn(self.egg_info,script)
-        namespace['__file__'] = script_filename
-        if os.path.exists(script_filename):
-            execfile(script_filename, namespace, namespace)
-        else:
-            from linecache import cache
-            cache[script_filename] = (
-                len(script_text), 0, script_text.split('\n'), script_filename
-            )
-            script_code = compile(script_text,script_filename,'exec')
-            exec script_code in namespace, namespace
-
-    def _has(self, path):
-        raise NotImplementedError(
-            "Can't perform this operation for unregistered loader type"
-        )
-
-    def _isdir(self, path):
-        raise NotImplementedError(
-            "Can't perform this operation for unregistered loader type"
-        )
-
-    def _listdir(self, path):
-        raise NotImplementedError(
-            "Can't perform this operation for unregistered loader type"
-        )
-
-    def _fn(self, base, resource_name):
-        if resource_name:
-            return os.path.join(base, *resource_name.split('/'))
-        return base
-
-    def _get(self, path):
-        if hasattr(self.loader, 'get_data'):
-            return self.loader.get_data(path)
-        raise NotImplementedError(
-            "Can't perform this operation for loaders without 'get_data()'"
-        )
-
-register_loader_type(object, NullProvider)
-
-
-class EggProvider(NullProvider):
-    """Provider based on a virtual filesystem"""
-
-    def __init__(self,module):
-        NullProvider.__init__(self,module)
-        self._setup_prefix()
-
-    def _setup_prefix(self):
-        # we assume here that our metadata may be nested inside a "basket"
-        # of multiple eggs; that's why we use module_path instead of .archive
-        path = self.module_path
-        old = None
-        while path!=old:
-            if path.lower().endswith('.egg'):
-                self.egg_name = os.path.basename(path)
-                self.egg_info = os.path.join(path, 'EGG-INFO')
-                self.egg_root = path
-                break
-            old = path
-            path, base = os.path.split(path)
-
-
-
-
-
-
-class DefaultProvider(EggProvider):
-    """Provides access to package resources in the filesystem"""
-
-    def _has(self, path):
-        return os.path.exists(path)
-
-    def _isdir(self,path):
-        return os.path.isdir(path)
-
-    def _listdir(self,path):
-        return os.listdir(path)
-
-    def get_resource_stream(self, manager, resource_name):
-        return open(self._fn(self.module_path, resource_name), 'rb')
-
-    def _get(self, path):
-        stream = open(path, 'rb')
-        try:
-            return stream.read()
-        finally:
-            stream.close()
-
-register_loader_type(type(None), DefaultProvider)
-
-if importlib_bootstrap is not None:
-    register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider)
-
-
-class EmptyProvider(NullProvider):
-    """Provider that returns nothing for all requests"""
-
-    _isdir = _has = lambda self,path: False
-    _get          = lambda self,path: ''
-    _listdir      = lambda self,path: []
-    module_path   = None
-
-    def __init__(self):
-        pass
-
-empty_provider = EmptyProvider()
-
-
-
-
-class ZipProvider(EggProvider):
-    """Resource support for zips and eggs"""
-
-    eagers = None
-
-    def __init__(self, module):
-        EggProvider.__init__(self,module)
-        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
-        self.zip_pre = self.loader.archive+os.sep
-
-    def _zipinfo_name(self, fspath):
-        # Convert a virtual filename (full path to file) into a zipfile subpath
-        # usable with the zipimport directory cache for our target archive
-        if fspath.startswith(self.zip_pre):
-            return fspath[len(self.zip_pre):]
-        raise AssertionError(
-            "%s is not a subpath of %s" % (fspath,self.zip_pre)
-        )
-
-    def _parts(self,zip_path):
-        # Convert a zipfile subpath into an egg-relative path part list
-        fspath = self.zip_pre+zip_path  # pseudo-fs path
-        if fspath.startswith(self.egg_root+os.sep):
-            return fspath[len(self.egg_root)+1:].split(os.sep)
-        raise AssertionError(
-            "%s is not a subpath of %s" % (fspath,self.egg_root)
-        )
-
-    def get_resource_filename(self, manager, resource_name):
-        if not self.egg_name:
-            raise NotImplementedError(
-                "resource_filename() only supported for .egg, not .zip"
-            )
-        # no need to lock for extraction, since we use temp names
-        zip_path = self._resource_to_zip(resource_name)
-        eagers = self._get_eager_resources()
-        if '/'.join(self._parts(zip_path)) in eagers:
-            for name in eagers:
-                self._extract_resource(manager, self._eager_to_zip(name))
-        return self._extract_resource(manager, zip_path)
-
-    def _extract_resource(self, manager, zip_path):
-
-        if zip_path in self._index():
-            for name in self._index()[zip_path]:
-                last = self._extract_resource(
-                    manager, os.path.join(zip_path, name)
-                )
-            return os.path.dirname(last)  # return the extracted directory name
-
-        zip_stat = self.zipinfo[zip_path]
-        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
-        date_time = (
-            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
-            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
-        )
-        timestamp = time.mktime(date_time)
-
-        try:
-            if not WRITE_SUPPORT:
-                raise IOError('"os.rename" and "os.unlink" are not supported '
-                              'on this platform')
-
-            real_path = manager.get_cache_path(
-                self.egg_name, self._parts(zip_path)
-            )
-
-            if os.path.isfile(real_path):
-                stat = os.stat(real_path)
-                if stat.st_size==size and stat.st_mtime==timestamp:
-                    # size and stamp match, don't bother extracting
-                    return real_path
-
-            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
-            os.write(outf, self.loader.get_data(zip_path))
-            os.close(outf)
-            utime(tmpnam, (timestamp,timestamp))
-            manager.postprocess(tmpnam, real_path)
-
-            try:
-                rename(tmpnam, real_path)
-
-            except os.error:
-                if os.path.isfile(real_path):
-                    stat = os.stat(real_path)
-
-                    if stat.st_size==size and stat.st_mtime==timestamp:
-                        # size and stamp match, somebody did it just ahead of
-                        # us, so we're done
-                        return real_path
-                    elif os.name=='nt':     # Windows, del old file and retry
-                        unlink(real_path)
-                        rename(tmpnam, real_path)
-                        return real_path
-                raise
-
-        except os.error:
-            manager.extraction_error()  # report a user-friendly error
-
-        return real_path
-
-    def _get_eager_resources(self):
-        if self.eagers is None:
-            eagers = []
-            for name in ('native_libs.txt', 'eager_resources.txt'):
-                if self.has_metadata(name):
-                    eagers.extend(self.get_metadata_lines(name))
-            self.eagers = eagers
-        return self.eagers
-
-    def _index(self):
-        try:
-            return self._dirindex
-        except AttributeError:
-            ind = {}
-            for path in self.zipinfo:
-                parts = path.split(os.sep)
-                while parts:
-                    parent = os.sep.join(parts[:-1])
-                    if parent in ind:
-                        ind[parent].append(parts[-1])
-                        break
-                    else:
-                        ind[parent] = [parts.pop()]
-            self._dirindex = ind
-            return ind
-
-    def _has(self, fspath):
-        zip_path = self._zipinfo_name(fspath)
-        return zip_path in self.zipinfo or zip_path in self._index()
-
-    def _isdir(self,fspath):
-        return self._zipinfo_name(fspath) in self._index()
-
-    def _listdir(self,fspath):
-        return list(self._index().get(self._zipinfo_name(fspath), ()))
-
-    def _eager_to_zip(self,resource_name):
-        return self._zipinfo_name(self._fn(self.egg_root,resource_name))
-
-    def _resource_to_zip(self,resource_name):
-        return self._zipinfo_name(self._fn(self.module_path,resource_name))
-
-register_loader_type(zipimport.zipimporter, ZipProvider)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class FileMetadata(EmptyProvider):
-    """Metadata handler for standalone PKG-INFO files
-
-    Usage::
-
-        metadata = FileMetadata("/path/to/PKG-INFO")
-
-    This provider rejects all data and metadata requests except for PKG-INFO,
-    which is treated as existing, and will be the contents of the file at
-    the provided location.
-    """
-
-    def __init__(self,path):
-        self.path = path
-
-    def has_metadata(self,name):
-        return name=='PKG-INFO'
-
-    def get_metadata(self,name):
-        if name=='PKG-INFO':
-            f = open(self.path,'rU')
-            metadata = f.read()
-            f.close()
-            return metadata
-        raise KeyError("No metadata except PKG-INFO is available")
-
-    def get_metadata_lines(self,name):
-        return yield_lines(self.get_metadata(name))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class PathMetadata(DefaultProvider):
-    """Metadata provider for egg directories
-
-    Usage::
-
-        # Development eggs:
-
-        egg_info = "/path/to/PackageName.egg-info"
-        base_dir = os.path.dirname(egg_info)
-        metadata = PathMetadata(base_dir, egg_info)
-        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
-        dist = Distribution(base_dir,project_name=dist_name,metadata=metadata)
-
-        # Unpacked egg directories:
-
-        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
-        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
-        dist = Distribution.from_filename(egg_path, metadata=metadata)
-    """
-
-    def __init__(self, path, egg_info):
-        self.module_path = path
-        self.egg_info = egg_info
-
-
-class EggMetadata(ZipProvider):
-    """Metadata provider for .egg files"""
-
-    def __init__(self, importer):
-        """Create a metadata provider from a zipimporter"""
-
-        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
-        self.zip_pre = importer.archive+os.sep
-        self.loader = importer
-        if importer.prefix:
-            self.module_path = os.path.join(importer.archive, importer.prefix)
-        else:
-            self.module_path = importer.archive
-        self._setup_prefix()
-
-
-class ImpWrapper:
-    """PEP 302 Importer that wraps Python's "normal" import algorithm"""
-
-    def __init__(self, path=None):
-        self.path = path
-
-    def find_module(self, fullname, path=None):
-        subname = fullname.split(".")[-1]
-        if subname != fullname and self.path is None:
-            return None
-        if self.path is None:
-            path = None
-        else:
-            path = [self.path]
-        try:
-            file, filename, etc = imp.find_module(subname, path)
-        except ImportError:
-            return None
-        return ImpLoader(file, filename, etc)
-
-
-class ImpLoader:
-    """PEP 302 Loader that wraps Python's "normal" import algorithm"""
-
-    def __init__(self, file, filename, etc):
-        self.file = file
-        self.filename = filename
-        self.etc = etc
-
-    def load_module(self, fullname):
-        try:
-            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
-        finally:
-            if self.file: self.file.close()
-        # Note: we don't set __loader__ because we want the module to look
-        # normal; i.e. this is just a wrapper for standard import machinery
-        return mod
-
-
-
-
-def get_importer(path_item):
-    """Retrieve a PEP 302 "importer" for the given path item
-
-    If there is no importer, this returns a wrapper around the builtin import
-    machinery.  The returned importer is only cached if it was created by a
-    path hook.
-    """
-    try:
-        importer = sys.path_importer_cache[path_item]
-    except KeyError:
-        for hook in sys.path_hooks:
-            try:
-                importer = hook(path_item)
-            except ImportError:
-                pass
-            else:
-                break
-        else:
-            importer = None
-
-    sys.path_importer_cache.setdefault(path_item,importer)
-    if importer is None:
-        try:
-            importer = ImpWrapper(path_item)
-        except ImportError:
-            pass
-    return importer
-
-try:
-    from pkgutil import get_importer, ImpImporter
-except ImportError:
-    pass    # Python 2.3 or 2.4, use our own implementation
-else:
-    ImpWrapper = ImpImporter    # Python 2.5, use pkgutil's implementation
-    del ImpLoader, ImpImporter
-
-
-
-
-
-
-_declare_state('dict', _distribution_finders = {})
-
-def register_finder(importer_type, distribution_finder):
-    """Register `distribution_finder` to find distributions in sys.path items
-
-    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
-    handler), and `distribution_finder` is a callable that, passed a path
-    item and the importer instance, yields ``Distribution`` instances found on
-    that path item.  See ``pkg_resources.find_on_path`` for an example."""
-    _distribution_finders[importer_type] = distribution_finder
-
-
-def find_distributions(path_item, only=False):
-    """Yield distributions accessible via `path_item`"""
-    importer = get_importer(path_item)
-    finder = _find_adapter(_distribution_finders, importer)
-    return finder(importer, path_item, only)
-
-def find_in_zip(importer, path_item, only=False):
-    metadata = EggMetadata(importer)
-    if metadata.has_metadata('PKG-INFO'):
-        yield Distribution.from_filename(path_item, metadata=metadata)
-    if only:
-        return  # don't yield nested distros
-    for subitem in metadata.resource_listdir('/'):
-        if subitem.endswith('.egg'):
-            subpath = os.path.join(path_item, subitem)
-            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
-                yield dist
-
-register_finder(zipimport.zipimporter, find_in_zip)
-
-def StringIO(*args, **kw):
-    """Thunk to load the real StringIO on demand"""
-    global StringIO
-    try:
-        from cStringIO import StringIO
-    except ImportError:
-        from StringIO import StringIO
-    return StringIO(*args,**kw)
-
-def find_nothing(importer, path_item, only=False):
-    return ()
-register_finder(object,find_nothing)
-
-def find_on_path(importer, path_item, only=False):
-    """Yield distributions accessible on a sys.path directory"""
-    path_item = _normalize_cached(path_item)
-
-    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
-        if path_item.lower().endswith('.egg'):
-            # unpacked egg
-            yield Distribution.from_filename(
-                path_item, metadata=PathMetadata(
-                    path_item, os.path.join(path_item,'EGG-INFO')
-                )
-            )
-        else:
-            # scan for .egg and .egg-info in directory
-            for entry in os.listdir(path_item):
-                lower = entry.lower()
-                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
-                    fullpath = os.path.join(path_item, entry)
-                    if os.path.isdir(fullpath):
-                        # egg-info directory, allow getting metadata
-                        metadata = PathMetadata(path_item, fullpath)
-                    else:
-                        metadata = FileMetadata(fullpath)
-                    yield Distribution.from_location(
-                        path_item,entry,metadata,precedence=DEVELOP_DIST
-                    )
-                elif not only and lower.endswith('.egg'):
-                    for dist in find_distributions(os.path.join(path_item, entry)):
-                        yield dist
-                elif not only and lower.endswith('.egg-link'):
-                    entry_file = open(os.path.join(path_item, entry))
-                    try:
-                        entry_lines = entry_file.readlines()
-                    finally:
-                        entry_file.close()
-                    for line in entry_lines:
-                        if not line.strip(): continue
-                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
-                            yield item
-                        break
-register_finder(ImpWrapper,find_on_path)
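-
-# Editorial sketch (not in the original file; the path shown is hypothetical):
-# with the finders registered above, enumerating the distributions visible on
-# a sys.path entry is simply
-#
-#   for dist in find_distributions('/usr/lib/python2.7/site-packages'):
-#       print(dist)   # e.g. "somepkg 1.0 (/usr/lib/python2.7/site-packages/...)"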
-
-if importlib_bootstrap is not None:
-    register_finder(importlib_bootstrap.FileFinder, find_on_path)
-
-_declare_state('dict', _namespace_handlers={})
-_declare_state('dict', _namespace_packages={})
-
-
-def register_namespace_handler(importer_type, namespace_handler):
-    """Register `namespace_handler` to declare namespace packages
-
-    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
-    handler), and `namespace_handler` is a callable like this::
-
-        def namespace_handler(importer,path_entry,moduleName,module):
-            # return a path_entry to use for child packages
-
-    Namespace handlers are only called if the importer object has already
-    agreed that it can handle the relevant path item, and they should only
-    return a subpath if the module __path__ does not already contain an
-    equivalent subpath.  For an example namespace handler, see
-    ``pkg_resources.file_ns_handler``.
-    """
-    _namespace_handlers[importer_type] = namespace_handler
-
-def _handle_ns(packageName, path_item):
-    """Ensure that named package includes a subpath of path_item (if needed)"""
-    importer = get_importer(path_item)
-    if importer is None:
-        return None
-    loader = importer.find_module(packageName)
-    if loader is None:
-        return None
-    module = sys.modules.get(packageName)
-    if module is None:
-        module = sys.modules[packageName] = types.ModuleType(packageName)
-        module.__path__ = []; _set_parent_ns(packageName)
-    elif not hasattr(module,'__path__'):
-        raise TypeError("Not a package:", packageName)
-    handler = _find_adapter(_namespace_handlers, importer)
-    subpath = handler(importer,path_item,packageName,module)
-    if subpath is not None:
-        path = module.__path__; path.append(subpath)
-        loader.load_module(packageName); module.__path__ = path
-    return subpath
-
-def declare_namespace(packageName):
-    """Declare that package 'packageName' is a namespace package"""
-
-    imp.acquire_lock()
-    try:
-        if packageName in _namespace_packages:
-            return
-
-        path, parent = sys.path, None
-        if '.' in packageName:
-            parent = '.'.join(packageName.split('.')[:-1])
-            declare_namespace(parent)
-            if parent not in _namespace_packages:
-                __import__(parent)
-            try:
-                path = sys.modules[parent].__path__
-            except AttributeError:
-                raise TypeError("Not a package:", parent)
-
-        # Track what packages are namespaces, so when new path items are added,
-        # they can be updated
-        _namespace_packages.setdefault(parent,[]).append(packageName)
-        _namespace_packages.setdefault(packageName,[])
-
-        for path_item in path:
-            # Ensure all the parent's path items are reflected in the child,
-            # if they apply
-            _handle_ns(packageName, path_item)
-
-    finally:
-        imp.release_lock()
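-
-# Editorial note (not in the original file): the usual caller of
-# declare_namespace() is a namespace package's __init__.py, which typically
-# contains nothing but:
-#
-#   __import__('pkg_resources').declare_namespace(__name__)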
-
-def fixup_namespace_packages(path_item, parent=None):
-    """Ensure that previously-declared namespace packages include path_item"""
-    imp.acquire_lock()
-    try:
-        for package in _namespace_packages.get(parent,()):
-            subpath = _handle_ns(package, path_item)
-            if subpath: fixup_namespace_packages(subpath,package)
-    finally:
-        imp.release_lock()
-
-def file_ns_handler(importer, path_item, packageName, module):
-    """Compute an ns-package subpath for a filesystem or zipfile importer"""
-
-    subpath = os.path.join(path_item, packageName.split('.')[-1])
-    normalized = _normalize_cached(subpath)
-    for item in module.__path__:
-        if _normalize_cached(item)==normalized:
-            break
-    else:
-        # Only return the path if it's not already there
-        return subpath
-
-register_namespace_handler(ImpWrapper,file_ns_handler)
-register_namespace_handler(zipimport.zipimporter,file_ns_handler)
-
-if importlib_bootstrap is not None:
-    register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler)
-
-
-def null_ns_handler(importer, path_item, packageName, module):
-    return None
-
-register_namespace_handler(object,null_ns_handler)
-
-
-def normalize_path(filename):
-    """Normalize a file/dir name for comparison purposes"""
-    return os.path.normcase(os.path.realpath(filename))
-
-def _normalize_cached(filename,_cache={}):
-    try:
-        return _cache[filename]
-    except KeyError:
-        _cache[filename] = result = normalize_path(filename)
-        return result
-
-def _set_parent_ns(packageName):
-    parts = packageName.split('.')
-    name = parts.pop()
-    if parts:
-        parent = '.'.join(parts)
-        setattr(sys.modules[parent], name, sys.modules[packageName])
-
-
-def yield_lines(strs):
-    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
-    if isinstance(strs,basestring):
-        for s in strs.splitlines():
-            s = s.strip()
-            if s and not s.startswith('#'):     # skip blank lines/comments
-                yield s
-    else:
-        for ss in strs:
-            for s in yield_lines(ss):
-                yield s
-
-LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
-CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
-DISTRO   = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
-VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
-COMMA    = re.compile(r"\s*,").match               # comma between items
-OBRACKET = re.compile(r"\s*\[").match
-CBRACKET = re.compile(r"\s*\]").match
-MODULE   = re.compile(r"\w+(\.\w+)*$").match
-EGG_NAME = re.compile(
-    r"(?P<name>[^-]+)"
-    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
-    re.VERBOSE | re.IGNORECASE
-).match
-
-component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
-replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
-
-def _parse_version_parts(s):
-    for part in component_re.split(s):
-        part = replace(part,part)
-        if part in ['', '.']:
-            continue
-        if part[:1] in '0123456789':
-            yield part.zfill(8)    # pad for numeric comparison
-        else:
-            yield '*'+part
-
-    yield '*final'  # ensure that alpha/beta/candidate are before final
-
-def parse_version(s):
-    """Convert a version string to a chronologically-sortable key
-
-    This is a rough cross between distutils' StrictVersion and LooseVersion;
-    if you give it versions that would work with StrictVersion, then it behaves
-    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
-    *possible* to create pathological version coding schemes that will fool
-    this parser, but they should be very rare in practice.
-
-    The returned value will be a tuple of strings.  Numeric portions of the
-    version are padded to 8 digits so they will compare numerically, but
-    without relying on how numbers compare relative to strings.  Dots are
-    dropped, but dashes are retained.  Trailing zeros between alpha segments
-    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
-    "2.4". Alphanumeric parts are lower-cased.
-
-    The algorithm assumes that strings like "-" and any alpha string that
-    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
-    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
-    considered newer than "2.4-1", which in turn is newer than "2.4".
-
-    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
-    come before "final" alphabetically) are assumed to be pre-release versions,
-    so that the version "2.4" is considered newer than "2.4a1".
-
-    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
-    "rc" are treated as if they were "c", i.e. as though they were release
-    candidates, and therefore are not as new as a version string that does not
-    contain them, and "dev" is replaced with an '@' so that it sorts lower than
-    than any other pre-release tag.
-    """
-    parts = []
-    for part in _parse_version_parts(s.lower()):
-        if part.startswith('*'):
-            # remove trailing zeros from each series of numeric parts
-            while parts and parts[-1]=='00000000':
-                parts.pop()
-        parts.append(part)
-    return tuple(parts)
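-
-# Editorial sketch (not in the original file): the keys produced above sort the
-# way the docstring describes, e.g.
-#
-#   parse_version('2.4a1') < parse_version('2.4') \
-#       < parse_version('2.4-1') < parse_version('2.4.1')
-#
-# because pre-release tags sort before the implicit '*final' marker, while a
-# bare '-1' patch level sorts after it.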
-
-class EntryPoint(object):
-    """Object representing an advertised importable object"""
-
-    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
-        if not MODULE(module_name):
-            raise ValueError("Invalid module name", module_name)
-        self.name = name
-        self.module_name = module_name
-        self.attrs = tuple(attrs)
-        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
-        self.dist = dist
-
-    def __str__(self):
-        s = "%s = %s" % (self.name, self.module_name)
-        if self.attrs:
-            s += ':' + '.'.join(self.attrs)
-        if self.extras:
-            s += ' [%s]' % ','.join(self.extras)
-        return s
-
-    def __repr__(self):
-        return "EntryPoint.parse(%r)" % str(self)
-
-    def load(self, require=True, env=None, installer=None):
-        if require: self.require(env, installer)
-        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
-        for attr in self.attrs:
-            try:
-                entry = getattr(entry,attr)
-            except AttributeError:
-                raise ImportError("%r has no %r attribute" % (entry,attr))
-        return entry
-
-    def require(self, env=None, installer=None):
-        if self.extras and not self.dist:
-            raise UnknownExtra("Can't require() without a distribution", self)
-        map(working_set.add,
-            working_set.resolve(self.dist.requires(self.extras),env,installer))
-
-
-
-    #@classmethod
-    def parse(cls, src, dist=None):
-        """Parse a single entry point from string `src`
-
-        Entry point syntax follows the form::
-
-            name = some.module:some.attr [extra1,extra2]
-
-        The entry name and module name are required, but the ``:attrs`` and
-        ``[extras]`` parts are optional
-        """
-        try:
-            attrs = extras = ()
-            name,value = src.split('=',1)
-            if '[' in value:
-                value,extras = value.split('[',1)
-                req = Requirement.parse("x["+extras)
-                if req.specs: raise ValueError
-                extras = req.extras
-            if ':' in value:
-                value,attrs = value.split(':',1)
-                if not MODULE(attrs.rstrip()):
-                    raise ValueError
-                attrs = attrs.rstrip().split('.')
-        except ValueError:
-            raise ValueError(
-                "EntryPoint must be in 'name=module:attrs [extras]' format",
-                src
-            )
-        else:
-            return cls(name.strip(), value.strip(), attrs, extras, dist)
-
-    parse = classmethod(parse)
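-
-    # Editorial sketch (not in the original file; 'mypkg' is a hypothetical
-    # module): parsing a typical specification gives
-    #
-    #   ep = EntryPoint.parse('main = mypkg.cli:main_func [ssl]')
-    #   ep.name == 'main'; ep.module_name == 'mypkg.cli'
-    #   ep.attrs == ('main_func',); ep.extras == ('ssl',)
-    #   str(ep) == 'main = mypkg.cli:main_func [ssl]'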
-
-
-
-
-
-
-
-
-    #@classmethod
-    def parse_group(cls, group, lines, dist=None):
-        """Parse an entry point group"""
-        if not MODULE(group):
-            raise ValueError("Invalid group name", group)
-        this = {}
-        for line in yield_lines(lines):
-            ep = cls.parse(line, dist)
-            if ep.name in this:
-                raise ValueError("Duplicate entry point", group, ep.name)
-            this[ep.name]=ep
-        return this
-
-    parse_group = classmethod(parse_group)
-
-    #@classmethod
-    def parse_map(cls, data, dist=None):
-        """Parse a map of entry point groups"""
-        if isinstance(data,dict):
-            data = data.items()
-        else:
-            data = split_sections(data)
-        maps = {}
-        for group, lines in data:
-            if group is None:
-                if not lines:
-                    continue
-                raise ValueError("Entry points must be listed in groups")
-            group = group.strip()
-            if group in maps:
-                raise ValueError("Duplicate group name", group)
-            maps[group] = cls.parse_group(group, lines, dist)
-        return maps
-
-    parse_map = classmethod(parse_map)
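-
-# Editorial sketch (not in the original file; the script and module names are
-# hypothetical): parse_map() accepts the text of an entry_points.txt file, e.g.
-#
-#   eps = EntryPoint.parse_map("[console_scripts]\nmytool = mypkg.cli:main")
-#   eps['console_scripts']['mytool'].module_name == 'mypkg.cli'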
-
-
-def _remove_md5_fragment(location):
-    if not location:
-        return ''
-    parsed = urlparse(location)
-    if parsed[-1].startswith('md5='):
-        return urlunparse(parsed[:-1] + ('',))
-    return location
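-
-# Editorial note (not in the original file): only an "#md5=..." fragment is
-# stripped, e.g.
-#
-#   _remove_md5_fragment('http://example.com/Foo-1.0.egg#md5=0123abcd')
-#       == 'http://example.com/Foo-1.0.egg'
-#
-# any other fragment (or no fragment at all) leaves the location unchanged.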
-
-
-class Distribution(object):
-    """Wrap an actual or potential sys.path entry w/metadata"""
-    PKG_INFO = 'PKG-INFO'
-
-    def __init__(self,
-        location=None, metadata=None, project_name=None, version=None,
-        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
-    ):
-        self.project_name = safe_name(project_name or 'Unknown')
-        if version is not None:
-            self._version = safe_version(version)
-        self.py_version = py_version
-        self.platform = platform
-        self.location = location
-        self.precedence = precedence
-        self._provider = metadata or empty_provider
-
-    #@classmethod
-    def from_location(cls,location,basename,metadata=None,**kw):
-        project_name, version, py_version, platform = [None]*4
-        basename, ext = os.path.splitext(basename)
-        if ext.lower() in _distributionImpl:
-            # .dist-info gets much of its metadata differently
-            match = EGG_NAME(basename)
-            if match:
-                project_name, version, py_version, platform = match.group(
-                    'name','ver','pyver','plat'
-                )
-            cls = _distributionImpl[ext.lower()]
-        return cls(
-            location, metadata, project_name=project_name, version=version,
-            py_version=py_version, platform=platform, **kw
-        )
-    from_location = classmethod(from_location)
-
-
-    hashcmp = property(
-        lambda self: (
-            getattr(self,'parsed_version',()),
-            self.precedence,
-            self.key,
-            _remove_md5_fragment(self.location),
-            self.py_version,
-            self.platform
-        )
-    )
-    def __hash__(self): return hash(self.hashcmp)
-    def __lt__(self, other):
-        return self.hashcmp < other.hashcmp
-    def __le__(self, other):
-        return self.hashcmp <= other.hashcmp
-    def __gt__(self, other):
-        return self.hashcmp > other.hashcmp
-    def __ge__(self, other):
-        return self.hashcmp >= other.hashcmp
-    def __eq__(self, other):
-        if not isinstance(other, self.__class__):
-            # It's not a Distribution, so they are not equal
-            return False
-        return self.hashcmp == other.hashcmp
-    def __ne__(self, other):
-        return not self == other
-
-    # These properties have to be lazy so that we don't have to load any
-    # metadata until/unless it's actually needed.  (i.e., some distributions
-    # may not know their name or version without loading PKG-INFO)
-
-    #@property
-    def key(self):
-        try:
-            return self._key
-        except AttributeError:
-            self._key = key = self.project_name.lower()
-            return key
-    key = property(key)
-
-    #@property
-    def parsed_version(self):
-        try:
-            return self._parsed_version
-        except AttributeError:
-            self._parsed_version = pv = parse_version(self.version)
-            return pv
-
-    parsed_version = property(parsed_version)
-
-    #@property
-    def version(self):
-        try:
-            return self._version
-        except AttributeError:
-            for line in self._get_metadata(self.PKG_INFO):
-                if line.lower().startswith('version:'):
-                    self._version = safe_version(line.split(':',1)[1].strip())
-                    return self._version
-            else:
-                raise ValueError(
-                    "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self
-                )
-    version = property(version)
-
-
-
-
-    #@property
-    def _dep_map(self):
-        try:
-            return self.__dep_map
-        except AttributeError:
-            dm = self.__dep_map = {None: []}
-            for name in 'requires.txt', 'depends.txt':
-                for extra,reqs in split_sections(self._get_metadata(name)):
-                    if extra: extra = safe_extra(extra)
-                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
-            return dm
-    _dep_map = property(_dep_map)
-
-    def requires(self,extras=()):
-        """List of Requirements needed for this distro if `extras` are used"""
-        dm = self._dep_map
-        deps = []
-        deps.extend(dm.get(None,()))
-        for ext in extras:
-            try:
-                deps.extend(dm[safe_extra(ext)])
-            except KeyError:
-                raise UnknownExtra(
-                    "%s has no such extra feature %r" % (self, ext)
-                )
-        return deps
-
-    def _get_metadata(self,name):
-        if self.has_metadata(name):
-            for line in self.get_metadata_lines(name):
-                yield line
-
-    def activate(self,path=None):
-        """Ensure distribution is importable on `path` (default=sys.path)"""
-        if path is None: path = sys.path
-        self.insert_on(path)
-        if path is sys.path:
-            fixup_namespace_packages(self.location)
-            map(declare_namespace, self._get_metadata('namespace_packages.txt'))
-
-
-    def egg_name(self):
-        """Return what this distribution's standard .egg filename should be"""
-        filename = "%s-%s-py%s" % (
-            to_filename(self.project_name), to_filename(self.version),
-            self.py_version or PY_MAJOR
-        )
-
-        if self.platform:
-            filename += '-'+self.platform
-        return filename
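-
-    # Editorial sketch (not in the original file; the egg name is made up):
-    # from_location() and egg_name() round-trip a standard egg basename:
-    #
-    #   d = Distribution.from_location(
-    #       '/plugins', 'FooBar-1.2-py2.7-linux-x86_64.egg')
-    #   d.project_name == 'FooBar'; d.version == '1.2'
-    #   d.py_version == '2.7'; d.platform == 'linux-x86_64'
-    #   d.egg_name() == 'FooBar-1.2-py2.7-linux-x86_64'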
-
-    def __repr__(self):
-        if self.location:
-            return "%s (%s)" % (self,self.location)
-        else:
-            return str(self)
-
-    def __str__(self):
-        try: version = getattr(self,'version',None)
-        except ValueError: version = None
-        version = version or "[unknown version]"
-        return "%s %s" % (self.project_name,version)
-
-    def __getattr__(self,attr):
-        """Delegate all unrecognized public attributes to .metadata provider"""
-        if attr.startswith('_'):
-            raise AttributeError,attr
-        return getattr(self._provider, attr)
-
-    #@classmethod
-    def from_filename(cls,filename,metadata=None, **kw):
-        return cls.from_location(
-            _normalize_cached(filename), os.path.basename(filename), metadata,
-            **kw
-        )
-    from_filename = classmethod(from_filename)
-
-    def as_requirement(self):
-        """Return a ``Requirement`` that matches this distribution exactly"""
-        return Requirement.parse('%s==%s' % (self.project_name, self.version))
-
-    def load_entry_point(self, group, name):
-        """Return the `name` entry point of `group` or raise ImportError"""
-        ep = self.get_entry_info(group,name)
-        if ep is None:
-            raise ImportError("Entry point %r not found" % ((group,name),))
-        return ep.load()
-
-    def get_entry_map(self, group=None):
-        """Return the entry point map for `group`, or the full entry map"""
-        try:
-            ep_map = self._ep_map
-        except AttributeError:
-            ep_map = self._ep_map = EntryPoint.parse_map(
-                self._get_metadata('entry_points.txt'), self
-            )
-        if group is not None:
-            return ep_map.get(group,{})
-        return ep_map
-
-    def get_entry_info(self, group, name):
-        """Return the EntryPoint object for `group`+`name`, or ``None``"""
-        return self.get_entry_map(group).get(name)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def insert_on(self, path, loc = None):
-        """Insert self.location in path before its nearest parent directory"""
-
-        loc = loc or self.location
-
-        if self.project_name == 'setuptools':
-            try:
-                version = self.version
-            except ValueError:
-                version = ''
-            if '0.7' in version:
-                raise ValueError(
-                    "A 0.7-series setuptools cannot be installed "
-                    "with distribute. Found one at %s" % str(self.location))
-
-        if not loc:
-            return
-
-        if path is sys.path:
-            self.check_version_conflict()
-
-        nloc = _normalize_cached(loc)
-        bdir = os.path.dirname(nloc)
-        npath= map(_normalize_cached, path)
-
-        bp = None
-        for p, item in enumerate(npath):
-            if item==nloc:
-                break
-            elif item==bdir and self.precedence==EGG_DIST:
-                # if it's an .egg, give it precedence over its directory
-                path.insert(p, loc)
-                npath.insert(p, nloc)
-                break
-        else:
-            path.append(loc)
-            return
-
-        # p is the spot where we found or inserted loc; now remove duplicates
-        while 1:
-            try:
-                np = npath.index(nloc, p+1)
-            except ValueError:
-                break
-            else:
-                del npath[np], path[np]
-                p = np  # ha!
-
-        return
-
-
-
-    def check_version_conflict(self):
-        if self.key=='distribute':
-            return      # ignore the inevitable setuptools self-conflicts  :(
-
-        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
-        loc = normalize_path(self.location)
-        for modname in self._get_metadata('top_level.txt'):
-            if (modname not in sys.modules or modname in nsp
-                or modname in _namespace_packages
-            ):
-                continue
-            if modname in ('pkg_resources', 'setuptools', 'site'):
-                continue
-            fn = getattr(sys.modules[modname], '__file__', None)
-            if fn and (normalize_path(fn).startswith(loc) or
-                       fn.startswith(self.location)):
-                continue
-            issue_warning(
-                "Module %s was already imported from %s, but %s is being added"
-                " to sys.path" % (modname, fn, self.location),
-            )
-
-    def has_version(self):
-        try:
-            self.version
-        except ValueError:
-            issue_warning("Unbuilt egg for "+repr(self))
-            return False
-        return True
-
-    def clone(self,**kw):
-        """Copy this distribution, substituting in any changed keyword args"""
-        for attr in (
-            'project_name', 'version', 'py_version', 'platform', 'location',
-            'precedence'
-        ):
-            kw.setdefault(attr, getattr(self,attr,None))
-        kw.setdefault('metadata', self._provider)
-        return self.__class__(**kw)
-
-
-
-
-    #@property
-    def extras(self):
-        return [dep for dep in self._dep_map if dep]
-    extras = property(extras)
-
-
-class DistInfoDistribution(Distribution):
-    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
-    PKG_INFO = 'METADATA'
-    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
-
-    @property
-    def _parsed_pkg_info(self):
-        """Parse and cache metadata"""
-        try:
-            return self._pkg_info
-        except AttributeError:
-            from email.parser import Parser
-            self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO))
-            return self._pkg_info
-
-    @property
-    def _dep_map(self):
-        try:
-            return self.__dep_map
-        except AttributeError:
-            self.__dep_map = self._compute_dependencies()
-            return self.__dep_map
-
-    def _preparse_requirement(self, requires_dist):
-        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
-        Split environment marker, add == prefix to version specifiers as
-        necessary, and remove parentheses.
-        """
-        parts = requires_dist.split(';', 1) + ['']
-        distvers = parts[0].strip()
-        mark = parts[1].strip()
-        distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
-        distvers = distvers.replace('(', '').replace(')', '')
-        return (distvers, mark)
-
-    def _compute_dependencies(self):
-        """Recompute this distribution's dependencies."""
-        from _markerlib import compile as compile_marker
-        dm = self.__dep_map = {None: []}
-
-        reqs = []
-        # Including any condition expressions
-        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
-            distvers, mark = self._preparse_requirement(req)
-            parsed = parse_requirements(distvers).next()
-            parsed.marker_fn = compile_marker(mark)
-            reqs.append(parsed)
-
-        def reqs_for_extra(extra):
-            for req in reqs:
-                if req.marker_fn(override={'extra':extra}):
-                    yield req
-
-        common = frozenset(reqs_for_extra(None))
-        dm[None].extend(common)
-
-        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
-            extra = safe_extra(extra.strip())
-            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
-
-        return dm
-
-
-_distributionImpl = {'.egg': Distribution,
-                     '.egg-info': Distribution,
-                     '.dist-info': DistInfoDistribution }
-
-
-def issue_warning(*args,**kw):
-    level = 1
-    g = globals()
-    try:
-        # find the first stack frame that is *not* code in
-        # the pkg_resources module, to use for the warning
-        while sys._getframe(level).f_globals is g:
-            level += 1
-    except ValueError:
-        pass
-    from warnings import warn
-    warn(stacklevel = level+1, *args, **kw)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def parse_requirements(strs):
-    """Yield ``Requirement`` objects for each specification in `strs`
-
-    `strs` must be an instance of ``basestring``, or a (possibly-nested)
-    iterable thereof.
-    """
-    # create a steppable iterator, so we can handle \-continuations
-    lines = iter(yield_lines(strs))
-
-    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
-
-        items = []
-
-        while not TERMINATOR(line,p):
-            if CONTINUE(line,p):
-                try:
-                    line = lines.next(); p = 0
-                except StopIteration:
-                    raise ValueError(
-                        "\\ must not appear on the last nonblank line"
-                    )
-
-            match = ITEM(line,p)
-            if not match:
-                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
-
-            items.append(match.group(*groups))
-            p = match.end()
-
-            match = COMMA(line,p)
-            if match:
-                p = match.end() # skip the comma
-            elif not TERMINATOR(line,p):
-                raise ValueError(
-                    "Expected ',' or end-of-list in",line,"at",line[p:]
-                )
-
-        match = TERMINATOR(line,p)
-        if match: p = match.end()   # skip the terminator, if any
-        return line, p, items
-
-    for line in lines:
-        match = DISTRO(line)
-        if not match:
-            raise ValueError("Missing distribution spec", line)
-        project_name = match.group(1)
-        p = match.end()
-        extras = []
-
-        match = OBRACKET(line,p)
-        if match:
-            p = match.end()
-            line, p, extras = scan_list(
-                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
-            )
-
-        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
-        specs = [(op,safe_version(val)) for op,val in specs]
-        yield Requirement(project_name, specs, extras)
-
-
-def _sort_dists(dists):
-    tmp = [(dist.hashcmp,dist) for dist in dists]
-    tmp.sort()
-    dists[::-1] = [d for hc,d in tmp]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class Requirement:
-    def __init__(self, project_name, specs, extras):
-        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
-        self.unsafe_name, project_name = project_name, safe_name(project_name)
-        self.project_name, self.key = project_name, project_name.lower()
-        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
-        index.sort()
-        self.specs = [(op,ver) for parsed,trans,op,ver in index]
-        self.index, self.extras = index, tuple(map(safe_extra,extras))
-        self.hashCmp = (
-            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
-            frozenset(self.extras)
-        )
-        self.__hash = hash(self.hashCmp)
-
-    def __str__(self):
-        specs = ','.join([''.join(s) for s in self.specs])
-        extras = ','.join(self.extras)
-        if extras: extras = '[%s]' % extras
-        return '%s%s%s' % (self.project_name, extras, specs)
-
-    def __eq__(self,other):
-        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
-
-    def __contains__(self,item):
-        if isinstance(item,Distribution):
-            if item.key <> self.key: return False
-            if self.index: item = item.parsed_version  # only get if we need it
-        elif isinstance(item,basestring):
-            item = parse_version(item)
-        last = None
-        compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
-        for parsed,trans,op,ver in self.index:
-            action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
-            if action=='F':     return False
-            elif action=='T':   return True
-            elif action=='+':   last = True
-            elif action=='-' or last is None:   last = False
-        if last is None: last = True    # no rules encountered
-        return last
-
-
-    def __hash__(self):
-        return self.__hash
-
-    def __repr__(self): return "Requirement.parse(%r)" % str(self)
-
-    #@staticmethod
-    def parse(s, replacement=True):
-        reqs = list(parse_requirements(s))
-        if reqs:
-            if len(reqs) == 1:
-                founded_req = reqs[0]
-                # if asked for setuptools distribution
-                # and if distribute is installed, we want to give
-                # distribute instead
-                if _override_setuptools(founded_req) and replacement:
-                    distribute = list(parse_requirements('distribute'))
-                    if len(distribute) == 1:
-                        return distribute[0]
-                    return founded_req
-                else:
-                    return founded_req
-
-            raise ValueError("Expected only one requirement", s)
-        raise ValueError("No requirements found", s)
-
-    parse = staticmethod(parse)
-
-state_machine = {
-    #       =><
-    '<' :  '--T',
-    '<=':  'T-T',
-    '>' :  'F+F',
-    '>=':  'T+F',
-    '==':  'T..',
-    '!=':  'F++',
-}
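-
-# Editorial sketch (not in the original file; 'FooBar' is a made-up project):
-# a Requirement built by Requirement.parse() tests versions against the
-# state_machine table above, e.g.
-#
-#   req = Requirement.parse('FooBar>=1.2,<2.0')
-#   '1.5' in req            # True
-#   '2.0' in req            # False  (upper bound is exclusive)
-#   str(req) == 'FooBar>=1.2,<2.0'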
-
-
-def _override_setuptools(req):
-    """Return True when distribute wants to override a setuptools dependency.
-
-    We want to override when the requirement is setuptools and the version is
-    a variant of 0.6.
-
-    """
-    if req.project_name == 'setuptools':
-        if not len(req.specs):
-            # Just setuptools: ok
-            return True
-        for comparator, version in req.specs:
-            if comparator in ['==', '>=', '>']:
-                if '0.7' in version:
-                    # We want some setuptools not from the 0.6 series.
-                    return False
-        return True
-    return False
-
-
-def _get_mro(cls):
-    """Get an mro for a type or classic class"""
-    if not isinstance(cls,type):
-        class cls(cls,object): pass
-        return cls.__mro__[1:]
-    return cls.__mro__
-
-def _find_adapter(registry, ob):
-    """Return an adapter factory for `ob` from `registry`"""
-    for t in _get_mro(getattr(ob, '__class__', type(ob))):
-        if t in registry:
-            return registry[t]
-
-
-def ensure_directory(path):
-    """Ensure that the parent directory of `path` exists"""
-    dirname = os.path.dirname(path)
-    if not os.path.isdir(dirname):
-        os.makedirs(dirname)
-
-def split_sections(s):
-    """Split a string or iterable thereof into (section,content) pairs
-
-    Each ``section`` is a stripped version of the section header ("[section]")
-    and each ``content`` is a list of stripped lines excluding blank lines and
-    comment-only lines.  If there are any such lines before the first section
-    header, they're returned in a first ``section`` of ``None``.
-    """
-    section = None
-    content = []
-    for line in yield_lines(s):
-        if line.startswith("["):
-            if line.endswith("]"):
-                if section or content:
-                    yield section, content
-                section = line[1:-1].strip()
-                content = []
-            else:
-                raise ValueError("Invalid section heading", line)
-        else:
-            content.append(line)
-
-    # wrap up last segment
-    yield section, content
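-
-# Editorial sketch (not in the original file): split_sections() turns INI-like
-# metadata text into (section, lines) pairs, e.g.
-#
-#   list(split_sections(['foo', '[bar]', 'baz=1']))
-#       == [(None, ['foo']), ('bar', ['baz=1'])]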
-
-def _mkstemp(*args,**kw):
-    from tempfile import mkstemp
-    old_open = os.open
-    try:
-        os.open = os_open   # temporarily bypass sandboxing
-        return mkstemp(*args,**kw)
-    finally:
-        os.open = old_open  # and then put it back
-
-
-# Set up global resource manager (deliberately not state-saved)
-_manager = ResourceManager()
-def _initialize(g):
-    for name in dir(_manager):
-        if not name.startswith('_'):
-            g[name] = getattr(_manager, name)
-_initialize(globals())
-
-# Prepare the master working set and make the ``require()`` API available
-_declare_state('object', working_set = WorkingSet())
-
-try:
-    # Does the main program list any requirements?
-    from __main__ import __requires__
-except ImportError:
-    pass # No: just use the default working set based on sys.path
-else:
-    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
-    try:
-        working_set.require(__requires__)
-    except VersionConflict:     # try it without defaults already on sys.path
-        working_set = WorkingSet([])    # by starting with an empty path
-        for dist in working_set.resolve(
-            parse_requirements(__requires__), Environment()
-        ):
-            working_set.add(dist)
-        for entry in sys.path:  # add any missing entries from sys.path
-            if entry not in working_set.entries:
-                working_set.add_entry(entry)
-        sys.path[:] = working_set.entries   # then copy back to sys.path
-
-require = working_set.require
-iter_entry_points = working_set.iter_entry_points
-add_activation_listener = working_set.subscribe
-run_script = working_set.run_script
-run_main = run_script   # backward compatibility
-# Activate all distributions already on sys.path, and ensure that
-# all distributions added to the working set in the future (e.g. by
-# calling ``require()``) will get activated as well.
-add_activation_listener(lambda dist: dist.activate())
-working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
-
diff --git a/vendor/distribute-0.6.34/release.py b/vendor/distribute-0.6.34/release.py
deleted file mode 100644
index d3746054bb781514ccef2577b872235ea1cc9e59..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/release.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Script to fully automate the release process. Requires Python 2.6+
-with sphinx installed and the 'hg' command on the path.
-"""
-
-from __future__ import print_function
-
-import subprocess
-import shutil
-import os
-import sys
-import urllib2
-import getpass
-import collections
-
-try:
-	import keyring
-except Exception:
-	pass
-
-VERSION = '0.6.34'
-
-def get_next_version():
-	digits = map(int, VERSION.split('.'))
-	digits[-1] += 1
-	return '.'.join(map(str, digits))
-
-NEXT_VERSION = get_next_version()
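-
-# Editorial note (not part of the original script): with VERSION = '0.6.34' the
-# helper above yields NEXT_VERSION = '0.6.35'; only the last component is
-# incremented, so '0.6.9' would bump to '0.6.10', not '0.7.0'.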
-
-files_with_versions = ('docs/conf.py', 'setup.py', 'release.py',
-	'README.txt', 'distribute_setup.py')
-
-def get_repo_name():
-	"""
-	Get the repo name from the hgrc default path.
-	"""
-	default = subprocess.check_output('hg paths default').strip()
-	parts = default.split('/')
-	if parts[-1] == '':
-		parts.pop()
-	return '/'.join(parts[-2:])
-
-def get_mercurial_creds(system='https://bitbucket.org', username=None):
-	"""
-	Return named tuple of username,password in much the same way that
-	Mercurial would (from the keyring).
-	"""
-	# todo: consider getting this from .hgrc
-	username = username or getpass.getuser()
-	keyring_username = '@@'.join((username, system))
-	system = '@'.join((keyring_username, 'Mercurial'))
-	password = (
-		keyring.get_password(system, keyring_username)
-		if 'keyring' in globals()
-		else None
-	)
-	if not password:
-		password = getpass.getpass()
-	Credential = collections.namedtuple('Credential', 'username password')
-	return Credential(username, password)
-
-def add_milestone_and_version(version=NEXT_VERSION):
-	auth = 'Basic ' + ':'.join(get_mercurial_creds()).encode('base64').strip()
-	headers = {
-		'Authorization': auth,
-		}
-	base = 'https://api.bitbucket.org'
-	for type in 'milestones', 'versions':
-		url = (base + '/1.0/repositories/{repo}/issues/{type}'
-			.format(repo = get_repo_name(), type=type))
-		req = urllib2.Request(url = url, headers = headers,
-			data='name='+version)
-		try:
-			urllib2.urlopen(req)
-		except urllib2.HTTPError as e:
-			print(e.fp.read())
-
-def bump_versions():
-	list(map(bump_version, files_with_versions))
-
-def bump_version(filename):
-	with open(filename, 'rb') as f:
-		lines = [line.replace(VERSION, NEXT_VERSION) for line in f]
-	with open(filename, 'wb') as f:
-		f.writelines(lines)
-
-def do_release():
-	assert all(map(os.path.exists, files_with_versions)), (
-		"Expected file(s) missing")
-
-	assert has_sphinx(), "You must have Sphinx installed to release"
-
-	res = raw_input('Have you read through the SCM changelog and '
-		'confirmed the changelog is current for releasing {VERSION}? '
-		.format(**globals()))
-	if not res.lower().startswith('y'):
-		print("Please do that")
-		raise SystemExit(1)
-
-	print("Travis-CI tests: http://travis-ci.org/#!/jaraco/distribute")
-	res = raw_input('Have you or has someone verified that the tests '
-		'pass on this revision? ')
-	if not res.lower().startswith('y'):
-		print("Please do that")
-		raise SystemExit(2)
-
-	subprocess.check_call(['hg', 'tag', VERSION])
-
-	subprocess.check_call(['hg', 'update', VERSION])
-
-	has_docs = build_docs()
-	if os.path.isdir('./dist'):
-		shutil.rmtree('./dist')
-	cmd = [sys.executable, 'setup.py', '-q', 'egg_info', '-RD', '-b', '',
-		'sdist', 'register', 'upload']
-	if has_docs:
-		cmd.append('upload_docs')
-	subprocess.check_call(cmd)
-	upload_bootstrap_script()
-
-	# update to the tip for the next operation
-	subprocess.check_call(['hg', 'update'])
-
-	# we just tagged the current version, bump for the next release.
-	bump_versions()
-	subprocess.check_call(['hg', 'ci', '-m',
-		'Bumped to {NEXT_VERSION} in preparation for next '
-		'release.'.format(**globals())])
-
-	# push the changes
-	subprocess.check_call(['hg', 'push'])
-
-	add_milestone_and_version()
-
-def has_sphinx():
-	try:
-		devnull = open(os.path.devnull, 'wb')
-		subprocess.Popen(['sphinx-build', '--version'], stdout=devnull,
-			stderr=subprocess.STDOUT).wait()
-	except Exception:
-		return False
-	return True
-
-def build_docs():
-	if not os.path.isdir('docs'):
-		return
-	if os.path.isdir('docs/build'):
-		shutil.rmtree('docs/build')
-	subprocess.check_call([
-		'sphinx-build',
-		'-b', 'html',
-		'-d', 'build/doctrees',
-		'.',
-		'build/html',
-		],
-		cwd='docs')
-	return True
-
-def upload_bootstrap_script():
-	scp_command = 'pscp' if sys.platform.startswith('win') else 'scp'
-	try:
-		subprocess.check_call([scp_command, 'distribute_setup.py',
-			'pypi@ziade.org:python-distribute.org/'])
-	except:
-		print("Unable to upload bootstrap script. Ask Tarek to do it.")
-
-if __name__ == '__main__':
-	do_release()
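Note that release.py above is Python 2 only (urllib2, raw_input, str.encode('base64')). As a hedged sketch for anyone adapting it, the Basic-auth requests that add_milestone_and_version builds could be expressed with the Python 3 standard library roughly as follows; the function name and the credential arguments are placeholders, not part of the original script::

    import base64
    import urllib.error
    import urllib.request

    def add_milestone_and_version_py3(repo, version, username, password):
        # Same two Bitbucket 1.0 API calls that release.py issues, minus the
        # keyring lookup; repo is e.g. 'tarek/distribute' as get_repo_name() returns.
        token = base64.b64encode(('%s:%s' % (username, password)).encode()).decode()
        headers = {'Authorization': 'Basic ' + token}
        for kind in ('milestones', 'versions'):
            url = ('https://api.bitbucket.org/1.0/repositories/%s/issues/%s'
                   % (repo, kind))
            req = urllib.request.Request(url, data=('name=' + version).encode(),
                                         headers=headers)
            try:
                urllib.request.urlopen(req)
            except urllib.error.HTTPError as e:
                print(e.read())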
diff --git a/vendor/distribute-0.6.34/setup.cfg b/vendor/distribute-0.6.34/setup.cfg
deleted file mode 100644
index 319f941216a87c0e2d41f8ab7590d6420d85a75e..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setup.cfg
+++ /dev/null
@@ -1,21 +0,0 @@
-[egg_info]
-tag_build = 
-tag_svn_revision = 0
-tag_date = 0
-
-[aliases]
-release = egg_info -RDb ''
-source = register sdist binary
-binary = bdist_egg upload --show-response
-
-[build_sphinx]
-source-dir = docs/
-build-dir = docs/build
-all_files = 1
-
-[upload_docs]
-upload-dir = docs/build/html
-
-[sdist]
-formats = gztar
-
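For context, the [aliases] section above chains the release steps from the command line; assuming setuptools' usual alias expansion (each command word is expanded when it is reached), the intended invocation is roughly::

    python setup.py release source
    # expands to: python setup.py egg_info -RDb '' register sdist bdist_egg upload --show-response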
diff --git a/vendor/distribute-0.6.34/setup.py b/vendor/distribute-0.6.34/setup.py
deleted file mode 100644
index a1e9ca91a78a18d44a15d4e6585cb0f0903bb5e7..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setup.py
+++ /dev/null
@@ -1,253 +0,0 @@
-#!/usr/bin/env python
-"""Distutils setup file, used to install or test 'setuptools'"""
-import sys
-import os
-import textwrap
-import re
-
-# Allow running setup.py from another directory.
-os.chdir(os.path.dirname(os.path.abspath(__file__)))
-
-src_root = None
-if sys.version_info >= (3,):
-    tmp_src = os.path.join("build", "src")
-    from distutils.filelist import FileList
-    from distutils import dir_util, file_util, util, log
-    log.set_verbosity(1)
-    fl = FileList()
-    manifest_file = open("MANIFEST.in")
-    for line in manifest_file:
-        fl.process_template_line(line)
-    manifest_file.close()
-    dir_util.create_tree(tmp_src, fl.files)
-    outfiles_2to3 = []
-    dist_script = os.path.join("build", "src", "distribute_setup.py")
-    for f in fl.files:
-        outf, copied = file_util.copy_file(f, os.path.join(tmp_src, f), update=1)
-        if copied and outf.endswith(".py") and outf != dist_script:
-            outfiles_2to3.append(outf)
-        if copied and outf.endswith('api_tests.txt'):
-            # XXX support this in distutils as well
-            from lib2to3.main import main
-            main('lib2to3.fixes', ['-wd', os.path.join(tmp_src, 'tests', 'api_tests.txt')])
-
-    util.run_2to3(outfiles_2to3)
-
-    # arrange setup to use the copy
-    sys.path.insert(0, os.path.abspath(tmp_src))
-    src_root = tmp_src
-
-from distutils.util import convert_path
-
-d = {}
-init_path = convert_path('setuptools/command/__init__.py')
-init_file = open(init_path)
-exec(init_file.read(), d)
-init_file.close()
-
-SETUP_COMMANDS = d['__all__']
-VERSION = "0.6.34"
-
-from setuptools import setup, find_packages
-from setuptools.command.build_py import build_py as _build_py
-from setuptools.command.test import test as _test
-
-scripts = []
-
-console_scripts = ["easy_install = setuptools.command.easy_install:main"]
-if os.environ.get("DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT") is None:
-    console_scripts.append("easy_install-%s = setuptools.command.easy_install:main" % sys.version[:3])
-
-# specific command that is used to generate windows .exe files
-class build_py(_build_py):
-    def build_package_data(self):
-        """Copy data files into build directory"""
-        lastdir = None
-        for package, src_dir, build_dir, filenames in self.data_files:
-            for filename in filenames:
-                target = os.path.join(build_dir, filename)
-                self.mkpath(os.path.dirname(target))
-                srcfile = os.path.join(src_dir, filename)
-                outf, copied = self.copy_file(srcfile, target)
-                srcfile = os.path.abspath(srcfile)
-
-                # avoid a bootstrapping issue with easy_install -U (when the
-                # previous version doesn't have convert_2to3_doctests)
-                if not hasattr(self.distribution, 'convert_2to3_doctests'):
-                    continue
-
-                if copied and srcfile in self.distribution.convert_2to3_doctests:
-                    self.__doctests_2to3.append(outf)
-
-class test(_test):
-    """Specific test class to avoid rewriting the entry_points.txt"""
-    def run(self):
-        entry_points = os.path.join('distribute.egg-info', 'entry_points.txt')
-
-        if not os.path.exists(entry_points):
-            _test.run(self)
-            return # even though _test.run will raise SystemExit
-
-        f = open(entry_points)
-
-        # running the test
-        try:
-            ep_content = f.read()
-        finally:
-            f.close()
-
-        try:
-            _test.run(self)
-        finally:
-            # restoring the file
-            f = open(entry_points, 'w')
-            try:
-                f.write(ep_content)
-            finally:
-                f.close()
-
-
-# if we are installing Distribute using "python setup.py install"
-# we need to get setuptools out of the way
-def _easy_install_marker():
-    return (len(sys.argv) == 5 and sys.argv[2] == 'bdist_egg' and
-            sys.argv[3] == '--dist-dir' and 'egg-dist-tmp-' in sys.argv[-1])
-
-def _buildout_marker():
-    command = os.environ.get('_')
-    if command:
-        return 'buildout' in os.path.basename(command)
-
-def _being_installed():
-    if os.environ.get('DONT_PATCH_SETUPTOOLS') is not None:
-        return False
-    if _buildout_marker():
-        # Installed by buildout, don't mess with a global setuptools.
-        return False
-    # easy_install marker
-    if "--help" in sys.argv[1:] or "-h" in sys.argv[1:]: # Don't bother doing anything if they're just asking for help
-        return False
-    return  'install' in sys.argv[1:] or _easy_install_marker()
-
-if _being_installed():
-    from distribute_setup import _before_install
-    _before_install()
-
-# return contents of reStructureText file with linked issue references
-def _linkified(rst_path):
-    bitroot = 'http://bitbucket.org/tarek/distribute'
-    revision = re.compile(r'\b(issue\s*#?\d+)\b', re.M | re.I)
-
-    rst_file = open(rst_path)
-    rst_content = rst_file.read()
-    rst_file.close()
-
-    anchors = revision.findall(rst_content) # ['Issue #43', ...]
-    anchors = sorted(set(anchors))
-    rst_content = revision.sub(r'`\1`_', rst_content)
-    rst_content += "\n"
-    for x in anchors:
-        issue = re.findall(r'\d+', x)[0]
-        rst_content += '.. _`%s`: %s/issue/%s\n' % (x, bitroot, issue)
-    rst_content += "\n"
-    return rst_content
-
-readme_file = open('README.txt')
-long_description = readme_file.read() + _linkified('CHANGES.txt')
-readme_file.close()
-
-dist = setup(
-    name="distribute",
-    version=VERSION,
-    description="Easily download, build, install, upgrade, and uninstall "
-                "Python packages",
-    author="The fellowship of the packaging",
-    author_email="distutils-sig@python.org",
-    license="PSF or ZPL",
-    long_description = long_description,
-    keywords = "CPAN PyPI distutils eggs package management",
-    url = "http://packages.python.org/distribute",
-    test_suite = 'setuptools.tests',
-    src_root = src_root,
-    packages = find_packages(),
-    package_data = {'setuptools':['*.exe']},
-
-    py_modules = ['pkg_resources', 'easy_install', 'site'],
-
-    zip_safe = (sys.version>="2.5"),   # <2.5 needs unzipped for -m to work
-
-    cmdclass = {'test': test},
-    entry_points = {
-
-        "distutils.commands" : [
-            "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
-            for cmd in SETUP_COMMANDS
-        ],
-
-        "distutils.setup_keywords": [
-            "eager_resources        = setuptools.dist:assert_string_list",
-            "namespace_packages     = setuptools.dist:check_nsp",
-            "extras_require         = setuptools.dist:check_extras",
-            "install_requires       = setuptools.dist:check_requirements",
-            "tests_require          = setuptools.dist:check_requirements",
-            "entry_points           = setuptools.dist:check_entry_points",
-            "test_suite             = setuptools.dist:check_test_suite",
-            "zip_safe               = setuptools.dist:assert_bool",
-            "package_data           = setuptools.dist:check_package_data",
-            "exclude_package_data   = setuptools.dist:check_package_data",
-            "include_package_data   = setuptools.dist:assert_bool",
-            "packages               = setuptools.dist:check_packages",
-            "dependency_links       = setuptools.dist:assert_string_list",
-            "test_loader            = setuptools.dist:check_importable",
-            "use_2to3               = setuptools.dist:assert_bool",
-            "convert_2to3_doctests  = setuptools.dist:assert_string_list",
-            "use_2to3_fixers        = setuptools.dist:assert_string_list",
-            "use_2to3_exclude_fixers = setuptools.dist:assert_string_list",
-        ],
-
-        "egg_info.writers": [
-            "PKG-INFO = setuptools.command.egg_info:write_pkg_info",
-            "requires.txt = setuptools.command.egg_info:write_requirements",
-            "entry_points.txt = setuptools.command.egg_info:write_entries",
-            "eager_resources.txt = setuptools.command.egg_info:overwrite_arg",
-            "namespace_packages.txt = setuptools.command.egg_info:overwrite_arg",
-            "top_level.txt = setuptools.command.egg_info:write_toplevel_names",
-            "depends.txt = setuptools.command.egg_info:warn_depends_obsolete",
-            "dependency_links.txt = setuptools.command.egg_info:overwrite_arg",
-        ],
-
-        "console_scripts": console_scripts,
-
-        "setuptools.file_finders":
-            ["svn_cvs = setuptools.command.sdist:_default_revctrl"],
-
-        "setuptools.installation":
-            ['eggsecutable = setuptools.command.easy_install:bootstrap'],
-        },
-
-
-    classifiers = textwrap.dedent("""
-        Development Status :: 5 - Production/Stable
-        Intended Audience :: Developers
-        License :: OSI Approved :: Python Software Foundation License
-        License :: OSI Approved :: Zope Public License
-        Operating System :: OS Independent
-        Programming Language :: Python :: 2.4
-        Programming Language :: Python :: 2.5
-        Programming Language :: Python :: 2.6
-        Programming Language :: Python :: 2.7
-        Programming Language :: Python :: 3
-        Programming Language :: Python :: 3.1
-        Programming Language :: Python :: 3.2
-        Programming Language :: Python :: 3.3
-        Topic :: Software Development :: Libraries :: Python Modules
-        Topic :: System :: Archiving :: Packaging
-        Topic :: System :: Systems Administration
-        Topic :: Utilities
-        """).strip().splitlines(),
-    scripts = scripts,
-)
-
-if _being_installed():
-    from distribute_setup import _after_install
-    _after_install(dist)
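To make _linkified concrete: given a CHANGES.txt entry such as "* Issue #323: ...", the regex substitution plus the appended anchor lines yield reStructuredText roughly like this (illustrative)::

    * `Issue #323`_: ...

    .. _`Issue #323`: http://bitbucket.org/tarek/distribute/issue/323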
diff --git a/vendor/distribute-0.6.34/setuptools/__init__.py b/vendor/distribute-0.6.34/setuptools/__init__.py
deleted file mode 100644
index 9de373f98e29100479609d7ea6f1bbcba1f4f22f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/__init__.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""Extensions to the 'distutils' for large or complex distributions"""
-from setuptools.extension import Extension, Library
-from setuptools.dist import Distribution, Feature, _get_unpatched
-import distutils.core, setuptools.command
-from setuptools.depends import Require
-from distutils.core import Command as _Command
-from distutils.util import convert_path
-import os
-import sys
-
-__version__ = '0.6'
-__all__ = [
-    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
-    'find_packages'
-]
-
-# This marker is used to simplify the process that checks whether the
-# setuptools package was installed by the Setuptools project
-# or by the Distribute project, in case Setuptools creates
-# a distribution with the same version.
-#
-# The distribute_setup script for instance, will check if this
-# attribute is present to decide whether to reinstall the package
-# or not.
-_distribute = True
-
-bootstrap_install_from = None
-
-# If we run 2to3 on .py files, should we also convert docstrings?
-# Default: yes; assume that we can detect doctests reliably
-run_2to3_on_doctests = True
-# Standard package names for fixer packages
-lib2to3_fixer_packages = ['lib2to3.fixes']
-
-def find_packages(where='.', exclude=()):
-    """Return a list all Python packages found within directory 'where'
-
-    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
-    will be converted to the appropriate local path syntax.  'exclude' is a
-    sequence of package names to exclude; '*' can be used as a wildcard in the
-    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
-    'foo' itself).
-    """
-    out = []
-    stack=[(convert_path(where), '')]
-    while stack:
-        where,prefix = stack.pop(0)
-        for name in os.listdir(where):
-            fn = os.path.join(where,name)
-            if ('.' not in name and os.path.isdir(fn) and
-                os.path.isfile(os.path.join(fn,'__init__.py'))
-            ):
-                out.append(prefix+name); stack.append((fn,prefix+name+'.'))
-    for pat in list(exclude)+['ez_setup', 'distribute_setup']:
-        from fnmatch import fnmatchcase
-        out = [item for item in out if not fnmatchcase(item,pat)]
-    return out
-
-setup = distutils.core.setup
-
-_Command = _get_unpatched(_Command)
-
-class Command(_Command):
-    __doc__ = _Command.__doc__
-
-    command_consumes_arguments = False
-
-    def __init__(self, dist, **kw):
-        # Add support for keyword arguments
-        _Command.__init__(self,dist)
-        for k,v in kw.items():
-            setattr(self,k,v)
-
-    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
-        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
-        for k,v in kw.items():
-            setattr(cmd,k,v)    # update command with keywords
-        return cmd
-
-import distutils.core
-distutils.core.Command = Command    # we can't patch distutils.cmd, alas
-
-def findall(dir = os.curdir):
-    """Find all files under 'dir' and return the list of full filenames
-    (relative to 'dir').
-    """
-    all_files = []
-    for base, dirs, files in os.walk(dir):
-        if base==os.curdir or base.startswith(os.curdir+os.sep):
-            base = base[2:]
-        if base:
-            files = [os.path.join(base, f) for f in files]
-        all_files.extend(filter(os.path.isfile, files))
-    return all_files
-
-import distutils.filelist
-distutils.filelist.findall = findall    # fix findall bug in distutils.
-
-# sys.dont_write_bytecode was introduced in Python 2.6.
-if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or
-    (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))):
-    _dont_write_bytecode = True
-else:
-    _dont_write_bytecode = False
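A minimal usage sketch for the find_packages defined above (the src/ layout and the exclusion patterns are hypothetical)::

    from setuptools import find_packages

    # Collect every package under src/, excluding 'tests' and its subpackages;
    # per the docstring, 'tests.*' alone would keep 'tests' itself.
    packages = find_packages(where='src', exclude=('tests', 'tests.*'))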
diff --git a/vendor/distribute-0.6.34/setuptools/archive_util.py b/vendor/distribute-0.6.34/setuptools/archive_util.py
deleted file mode 100644
index e22b25c00ddb7f0449054cfb5b78d8bc04695b26..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/archive_util.py
+++ /dev/null
@@ -1,214 +0,0 @@
-"""Utilities for extracting common archive formats"""
-
-
-__all__ = [
-    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
-    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
-]
-
-import zipfile, tarfile, os, shutil
-from pkg_resources import ensure_directory
-from distutils.errors import DistutilsError
-
-class UnrecognizedFormat(DistutilsError):
-    """Couldn't recognize the archive type"""
-
-def default_filter(src,dst):
-    """The default progress/filter callback; returns True for all files"""   
-    return dst
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def unpack_archive(filename, extract_dir, progress_filter=default_filter,
-    drivers=None
-):
-    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
-
-    `progress_filter` is a function taking two arguments: a source path
-    internal to the archive ('/'-separated), and a filesystem path where it
-    will be extracted.  The callback must return the desired extract path
-    (which may be the same as the one passed in), or else ``None`` to skip
-    that file or directory.  The callback can thus be used to report on the
-    progress of the extraction, as well as to filter the items extracted or
-    alter their extraction paths.
-
-    `drivers`, if supplied, must be a non-empty sequence of functions with the
-    same signature as this function (minus the `drivers` argument), that raise
-    ``UnrecognizedFormat`` if they do not support extracting the designated
-    archive type.  The `drivers` are tried in sequence until one is found that
-    does not raise an error, or until all are exhausted (in which case
-    ``UnrecognizedFormat`` is raised).  If you do not supply a sequence of
-    drivers, the module's ``extraction_drivers`` constant will be used, which
-    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
-    order.
-    """
-    for driver in drivers or extraction_drivers:
-        try:
-            driver(filename, extract_dir, progress_filter)
-        except UnrecognizedFormat:
-            continue
-        else:
-            return
-    else:
-        raise UnrecognizedFormat(
-            "Not a recognized archive type: %s" % filename
-        )
-
-
-
-
-
-
-
-def unpack_directory(filename, extract_dir, progress_filter=default_filter):
-    """"Unpack" a directory, using the same interface as for archives
-
-    Raises ``UnrecognizedFormat`` if `filename` is not a directory
-    """
-    if not os.path.isdir(filename):
-        raise UnrecognizedFormat("%s is not a directory" % (filename,))
-
-    paths = {filename:('',extract_dir)}
-    for base, dirs, files in os.walk(filename):
-        src,dst = paths[base]
-        for d in dirs:
-            paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
-        for f in files:
-            name = src+f
-            target = os.path.join(dst,f)
-            target = progress_filter(src+f, target)
-            if not target:
-                continue    # skip non-files
-            ensure_directory(target)
-            f = os.path.join(base,f)
-            shutil.copyfile(f, target)
-            shutil.copystat(f, target)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
-    """Unpack zip `filename` to `extract_dir`
-
-    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
-    by ``zipfile.is_zipfile()``).  See ``unpack_archive()`` for an explanation
-    of the `progress_filter` argument.
-    """
-
-    if not zipfile.is_zipfile(filename):
-        raise UnrecognizedFormat("%s is not a zip file" % (filename,))
-
-    z = zipfile.ZipFile(filename)
-    try:
-        for info in z.infolist():
-            name = info.filename
-
-            # don't extract absolute paths or ones with .. in them
-            if name.startswith('/') or '..' in name:
-                continue
-
-            target = os.path.join(extract_dir, *name.split('/'))
-            target = progress_filter(name, target)
-            if not target:
-                continue
-            if name.endswith('/'):
-                # directory
-                ensure_directory(target)
-            else:
-                # file
-                ensure_directory(target)
-                data = z.read(info.filename)
-                f = open(target,'wb')
-                try:
-                    f.write(data)
-                finally:
-                    f.close()
-                    del data
-            unix_attributes = info.external_attr >> 16
-            if unix_attributes:
-                os.chmod(target, unix_attributes)
-    finally:
-        z.close()
-
-
-def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
-    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
-
-    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
-    by ``tarfile.open()``).  See ``unpack_archive()`` for an explanation
-    of the `progress_filter` argument.
-    """
-
-    try:
-        tarobj = tarfile.open(filename)
-    except tarfile.TarError:
-        raise UnrecognizedFormat(
-            "%s is not a compressed or uncompressed tar file" % (filename,)
-        )
-
-    try:
-        tarobj.chown = lambda *args: None   # don't do any chowning!
-        for member in tarobj:
-            name = member.name
-            # don't extract absolute paths or ones with .. in them
-            if not name.startswith('/') and '..' not in name:
-                prelim_dst = os.path.join(extract_dir, *name.split('/'))
-                final_dst = progress_filter(name, prelim_dst)
-                # If progress_filter returns None, then we do not extract
-                # this file
-                # TODO: Do we really need to limit to just these file types?
-                # tarobj.extract() will handle all files on all platforms,
-                # turning file types that aren't allowed on that platform into
-                # regular files.
-                if final_dst and (member.isfile() or member.isdir() or
-                        member.islnk() or member.issym()):
-                    tarobj.extract(member, extract_dir)
-                    if final_dst != prelim_dst:
-                        shutil.move(prelim_dst, final_dst)
-        return True
-    finally:
-        tarobj.close()
-
-
-
-
-extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
-
-
-
-
-
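A short usage sketch for unpack_archive with a custom progress_filter, following the contract its docstring describes (the archive name and the filtering rule are hypothetical)::

    from setuptools.archive_util import unpack_archive

    def skip_docs(src, dst):
        # Return None to skip an entry; otherwise return the (possibly
        # rewritten) extraction target.
        if src.startswith('docs/'):
            return None
        print('extracting %s' % src)
        return dst

    unpack_archive('example-1.0.egg', 'build/example', progress_filter=skip_docs)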
diff --git a/vendor/distribute-0.6.34/setuptools/cli-32.exe b/vendor/distribute-0.6.34/setuptools/cli-32.exe
deleted file mode 100755
index 9b7717b78bbf71f105ccde26746a0f6e3a4d12db..0000000000000000000000000000000000000000
Binary files a/vendor/distribute-0.6.34/setuptools/cli-32.exe and /dev/null differ
diff --git a/vendor/distribute-0.6.34/setuptools/cli-64.exe b/vendor/distribute-0.6.34/setuptools/cli-64.exe
deleted file mode 100755
index 265585afc4042ce55c59d28ef1aab37f0a68ecdc..0000000000000000000000000000000000000000
Binary files a/vendor/distribute-0.6.34/setuptools/cli-64.exe and /dev/null differ
diff --git a/vendor/distribute-0.6.34/setuptools/cli.exe b/vendor/distribute-0.6.34/setuptools/cli.exe
deleted file mode 100755
index 9b7717b78bbf71f105ccde26746a0f6e3a4d12db..0000000000000000000000000000000000000000
Binary files a/vendor/distribute-0.6.34/setuptools/cli.exe and /dev/null differ
diff --git a/vendor/distribute-0.6.34/setuptools/command/__init__.py b/vendor/distribute-0.6.34/setuptools/command/__init__.py
deleted file mode 100644
index b063fa192574da8f4b6fd20a6257164075481031..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-__all__ = [
-    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
-    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
-    'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
-    'register', 'bdist_wininst', 'upload_docs',
-]
-
-from setuptools.command import install_scripts
-import sys
-
-if sys.version>='2.5':
-    # In Python 2.5 and above, distutils includes its own upload command
-    __all__.remove('upload')
-
-from distutils.command.bdist import bdist
-
-if 'egg' not in bdist.format_commands:
-    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
-    bdist.format_commands.append('egg')
-
-del bdist, sys
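The bdist format registration above is what makes an invocation along these lines work (illustrative)::

    python setup.py bdist --formats=egg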
diff --git a/vendor/distribute-0.6.34/setuptools/command/alias.py b/vendor/distribute-0.6.34/setuptools/command/alias.py
deleted file mode 100644
index f5368b29e9d705f6dc317e38d71ad2c03668df66..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/alias.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-from setuptools.command.setopt import edit_config, option_base, config_file
-
-def shquote(arg):
-    """Quote an argument for later parsing by shlex.split()"""
-    for c in '"', "'", "\\", "#":
-        if c in arg: return repr(arg)
-    if arg.split()<>[arg]:
-        return repr(arg)
-    return arg        
-
-
-class alias(option_base):
-    """Define a shortcut that invokes one or more commands"""
-    
-    description = "define a shortcut to invoke one or more commands"
-    command_consumes_arguments = True
-
-    user_options = [
-        ('remove',   'r', 'remove (unset) the alias'), 
-    ] + option_base.user_options
-
-    boolean_options = option_base.boolean_options + ['remove']
-
-    def initialize_options(self):
-        option_base.initialize_options(self)
-        self.args = None
-        self.remove = None
-
-    def finalize_options(self):
-        option_base.finalize_options(self)
-        if self.remove and len(self.args)<>1:
-            raise DistutilsOptionError(
-                "Must specify exactly one argument (the alias name) when "
-                "using --remove"
-            )
-
-    def run(self):
-        aliases = self.distribution.get_option_dict('aliases')
-
-        if not self.args:
-            print "Command Aliases"
-            print "---------------"
-            for alias in aliases:
-                print "setup.py alias", format_alias(alias, aliases)
-            return
-
-        elif len(self.args)==1:
-            alias, = self.args
-            if self.remove:
-                command = None
-            elif alias in aliases:
-                print "setup.py alias", format_alias(alias, aliases)
-                return
-            else:
-                print "No alias definition found for %r" % alias
-                return
-        else:
-            alias = self.args[0]
-            command = ' '.join(map(shquote,self.args[1:]))
-
-        edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
-
-
-def format_alias(name, aliases):
-    source, command = aliases[name]
-    if source == config_file('global'):
-        source = '--global-config '
-    elif source == config_file('user'):
-        source = '--user-config '
-    elif source == config_file('local'):
-        source = ''
-    else:
-        source = '--filename=%r' % source
-    return source+name+' '+command
-            
-
-
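As a rough usage sketch of the alias command defined above (the alias name and the aliased commands are hypothetical)::

    python setup.py alias daily egg_info --tag-build=DEV sdist
    python setup.py daily              # runs the aliased commands
    python setup.py alias --remove daily
    python setup.py alias              # list the defined aliases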
diff --git a/vendor/distribute-0.6.34/setuptools/command/bdist_egg.py b/vendor/distribute-0.6.34/setuptools/command/bdist_egg.py
deleted file mode 100644
index 17fae984a7cca21ec2c1c0f01a146565f4cbdf93..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/bdist_egg.py
+++ /dev/null
@@ -1,548 +0,0 @@
-"""setuptools.command.bdist_egg
-
-Build .egg distributions"""
-
-# This module should be kept compatible with Python 2.3
-import sys, os, marshal
-from setuptools import Command
-from distutils.dir_util import remove_tree, mkpath
-try:
-    from distutils.sysconfig import get_python_version, get_python_lib
-except ImportError:
-    from sysconfig import get_python_version
-    from distutils.sysconfig import get_python_lib
-
-from distutils import log
-from distutils.errors import DistutilsSetupError
-from pkg_resources import get_build_platform, Distribution, ensure_directory
-from pkg_resources import EntryPoint
-from types import CodeType
-from setuptools.extension import Library
-
-def strip_module(filename):
-    if '.' in filename:
-        filename = os.path.splitext(filename)[0]
-    if filename.endswith('module'):
-        filename = filename[:-6]
-    return filename
-
-def write_stub(resource, pyfile):
-    f = open(pyfile,'w')
-    f.write('\n'.join([
-        "def __bootstrap__():",
-        "   global __bootstrap__, __loader__, __file__",
-        "   import sys, pkg_resources, imp",
-        "   __file__ = pkg_resources.resource_filename(__name__,%r)"
-            % resource,
-        "   __loader__ = None; del __bootstrap__, __loader__",
-        "   imp.load_dynamic(__name__,__file__)",
-        "__bootstrap__()",
-        "" # terminal \n
-    ]))
-    f.close()
-
-# stub __init__.py for packages distributed without one
-NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
-
-class bdist_egg(Command):
-
-    description = "create an \"egg\" distribution"
-
-    user_options = [
-        ('bdist-dir=', 'b',
-            "temporary directory for creating the distribution"),
-        ('plat-name=', 'p',
-                     "platform name to embed in generated filenames "
-                     "(default: %s)" % get_build_platform()),
-        ('exclude-source-files', None,
-                     "remove all .py files from the generated egg"),
-        ('keep-temp', 'k',
-                     "keep the pseudo-installation tree around after " +
-                     "creating the distribution archive"),
-        ('dist-dir=', 'd',
-                     "directory to put final built distributions in"),
-        ('skip-build', None,
-                     "skip rebuilding everything (for testing/debugging)"),
-    ]
-
-    boolean_options = [
-        'keep-temp', 'skip-build', 'exclude-source-files'
-    ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def initialize_options (self):
-        self.bdist_dir = None
-        self.plat_name = None
-        self.keep_temp = 0
-        self.dist_dir = None
-        self.skip_build = 0
-        self.egg_output = None
-        self.exclude_source_files = None
-
-
-    def finalize_options(self):
-        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
-        self.egg_info = ei_cmd.egg_info
-
-        if self.bdist_dir is None:
-            bdist_base = self.get_finalized_command('bdist').bdist_base
-            self.bdist_dir = os.path.join(bdist_base, 'egg')
-
-        if self.plat_name is None:
-            self.plat_name = get_build_platform()
-
-        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
-
-        if self.egg_output is None:
-
-            # Compute filename of the output egg
-            basename = Distribution(
-                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
-                get_python_version(),
-                self.distribution.has_ext_modules() and self.plat_name
-            ).egg_name()
-
-            self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
-
-
-
-
-
-
-
-
-    def do_install_data(self):
-        # Hack for packages that install data to install's --install-lib
-        self.get_finalized_command('install').install_lib = self.bdist_dir
-
-        site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
-        old, self.distribution.data_files = self.distribution.data_files,[]
-
-        for item in old:
-            if isinstance(item,tuple) and len(item)==2:
-                if os.path.isabs(item[0]):
-                    realpath = os.path.realpath(item[0])
-                    normalized = os.path.normcase(realpath)
-                    if normalized==site_packages or normalized.startswith(
-                        site_packages+os.sep
-                    ):
-                        item = realpath[len(site_packages)+1:], item[1]
-                    # XXX else: raise ???
-            self.distribution.data_files.append(item)
-
-        try:
-            log.info("installing package data to %s" % self.bdist_dir)
-            self.call_command('install_data', force=0, root=None)
-        finally:
-            self.distribution.data_files = old
-
-
-    def get_outputs(self):
-        return [self.egg_output]
-
-
-    def call_command(self,cmdname,**kw):
-        """Invoke reinitialized command `cmdname` with keyword args"""
-        for dirname in INSTALL_DIRECTORY_ATTRS:
-            kw.setdefault(dirname,self.bdist_dir)
-        kw.setdefault('skip_build',self.skip_build)
-        kw.setdefault('dry_run', self.dry_run)
-        cmd = self.reinitialize_command(cmdname, **kw)
-        self.run_command(cmdname)
-        return cmd
-
-
-    def run(self):
-        # Generate metadata first
-        self.run_command("egg_info")
-
-        # We run install_lib before install_data, because some data hacks
-        # pull their data path from the install_lib command.
-        log.info("installing library code to %s" % self.bdist_dir)
-        instcmd = self.get_finalized_command('install')
-        old_root = instcmd.root; instcmd.root = None
-        cmd = self.call_command('install_lib', warn_dir=0)
-        instcmd.root = old_root
-
-        all_outputs, ext_outputs = self.get_ext_outputs()
-        self.stubs = []
-        to_compile = []
-        for (p,ext_name) in enumerate(ext_outputs):
-            filename,ext = os.path.splitext(ext_name)
-            pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
-            self.stubs.append(pyfile)
-            log.info("creating stub loader for %s" % ext_name)
-            if not self.dry_run:
-                write_stub(os.path.basename(ext_name), pyfile)
-            to_compile.append(pyfile)
-            ext_outputs[p] = ext_name.replace(os.sep,'/')
-
-        to_compile.extend(self.make_init_files())
-        if to_compile:
-            cmd.byte_compile(to_compile)
-
-        if self.distribution.data_files:
-            self.do_install_data()
-
-        # Make the EGG-INFO directory
-        archive_root = self.bdist_dir
-        egg_info = os.path.join(archive_root,'EGG-INFO')
-        self.mkpath(egg_info)
-        if self.distribution.scripts:
-            script_dir = os.path.join(egg_info, 'scripts')
-            log.info("installing scripts to %s" % script_dir)
-            self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
-
-        self.copy_metadata_to(egg_info)
-        native_libs = os.path.join(egg_info, "native_libs.txt")
-        if all_outputs:
-            log.info("writing %s" % native_libs)
-            if not self.dry_run:
-                ensure_directory(native_libs)
-                libs_file = open(native_libs, 'wt')
-                libs_file.write('\n'.join(all_outputs))
-                libs_file.write('\n')
-                libs_file.close()
-        elif os.path.isfile(native_libs):
-            log.info("removing %s" % native_libs)
-            if not self.dry_run:
-                os.unlink(native_libs)
-
-        write_safety_flag(
-            os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
-        )
-
-        if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
-            log.warn(
-                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
-                "Use the install_requires/extras_require setup() args instead."
-            )
-
-        if self.exclude_source_files:
-            self.zap_pyfiles()
-
-        # Make the archive
-        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
-                          dry_run=self.dry_run, mode=self.gen_header())
-        if not self.keep_temp:
-            remove_tree(self.bdist_dir, dry_run=self.dry_run)
-
-        # Add to 'Distribution.dist_files' so that the "upload" command works
-        getattr(self.distribution,'dist_files',[]).append(
-            ('bdist_egg',get_python_version(),self.egg_output))
-
-
-
-
-    def zap_pyfiles(self):
-        log.info("Removing .py files from temporary directory")
-        for base,dirs,files in walk_egg(self.bdist_dir):
-            for name in files:
-                if name.endswith('.py'):
-                    path = os.path.join(base,name)
-                    log.debug("Deleting %s", path)
-                    os.unlink(path)
-
-    def zip_safe(self):
-        safe = getattr(self.distribution,'zip_safe',None)
-        if safe is not None:
-            return safe
-        log.warn("zip_safe flag not set; analyzing archive contents...")
-        return analyze_egg(self.bdist_dir, self.stubs)
-
-    def make_init_files(self):
-        """Create missing package __init__ files"""
-        init_files = []
-        for base,dirs,files in walk_egg(self.bdist_dir):
-            if base==self.bdist_dir:
-                # don't put an __init__ in the root
-                continue
-            for name in files:
-                if name.endswith('.py'):
-                    if '__init__.py' not in files:
-                        pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
-                        if self.distribution.has_contents_for(pkg):
-                            log.warn("Creating missing __init__.py for %s",pkg)
-                            filename = os.path.join(base,'__init__.py')
-                            if not self.dry_run:
-                                f = open(filename,'w'); f.write(NS_PKG_STUB)
-                                f.close()
-                            init_files.append(filename)
-                    break
-            else:
-                # not a package, don't traverse to subdirectories
-                dirs[:] = []
-
-        return init_files
-
-    def gen_header(self):
-        epm = EntryPoint.parse_map(self.distribution.entry_points or '')
-        ep = epm.get('setuptools.installation',{}).get('eggsecutable')
-        if ep is None:
-            return 'w'  # not an eggsecutable, do it the usual way.
-
-        if not ep.attrs or ep.extras:
-            raise DistutilsSetupError(
-                "eggsecutable entry point (%r) cannot have 'extras' "
-                "or refer to a module" % (ep,)
-            )
-
-        pyver = sys.version[:3]
-        pkg = ep.module_name
-        full = '.'.join(ep.attrs)
-        base = ep.attrs[0]
-        basename = os.path.basename(self.egg_output)
-
-        header = (
-            "#!/bin/sh\n"
-            'if [ `basename $0` = "%(basename)s" ]\n'
-            'then exec python%(pyver)s -c "'
-            "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
-            "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
-            '" "$@"\n'
-            'else\n'
-            '  echo $0 is not the correct name for this egg file.\n'
-            '  echo Please rename it back to %(basename)s and try again.\n'
-            '  exec false\n'
-            'fi\n'
-
-        ) % locals()
-
-        if not self.dry_run:
-            mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
-            f = open(self.egg_output, 'w')
-            f.write(header)
-            f.close()
-        return 'a'
-
-
-    def copy_metadata_to(self, target_dir):
-        "Copy metadata (egg info) to the target_dir"
-        # normalize the path (so that a forward-slash in egg_info will
-        # match using startswith below)
-        norm_egg_info = os.path.normpath(self.egg_info)
-        prefix = os.path.join(norm_egg_info,'')
-        for path in self.ei_cmd.filelist.files:
-            if path.startswith(prefix):
-                target = os.path.join(target_dir, path[len(prefix):])
-                ensure_directory(target)
-                self.copy_file(path, target)
-
-    def get_ext_outputs(self):
-        """Get a list of relative paths to C extensions in the output distro"""
-
-        all_outputs = []
-        ext_outputs = []
-
-        paths = {self.bdist_dir:''}
-        for base, dirs, files in os.walk(self.bdist_dir):
-            for filename in files:
-                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
-                    all_outputs.append(paths[base]+filename)
-            for filename in dirs:
-                paths[os.path.join(base,filename)] = paths[base]+filename+'/'
-
-        if self.distribution.has_ext_modules():
-            build_cmd = self.get_finalized_command('build_ext')
-            for ext in build_cmd.extensions:
-                if isinstance(ext,Library):
-                    continue
-                fullname = build_cmd.get_ext_fullname(ext.name)
-                filename = build_cmd.get_ext_filename(fullname)
-                if not os.path.basename(filename).startswith('dl-'):
-                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
-                        ext_outputs.append(filename)
-
-        return all_outputs, ext_outputs
-
-
-NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
-
-
-
-
-def walk_egg(egg_dir):
-    """Walk an unpacked egg's contents, skipping the metadata directory"""
-    walker = os.walk(egg_dir)
-    base,dirs,files = walker.next()
-    if 'EGG-INFO' in dirs:
-        dirs.remove('EGG-INFO')
-    yield base,dirs,files
-    for bdf in walker:
-        yield bdf
-
-def analyze_egg(egg_dir, stubs):
-    # check for existing flag in EGG-INFO
-    for flag,fn in safety_flags.items():
-        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
-            return flag
-    if not can_scan(): return False
-    safe = True
-    for base, dirs, files in walk_egg(egg_dir):
-        for name in files:
-            if name.endswith('.py') or name.endswith('.pyw'):
-                continue
-            elif name.endswith('.pyc') or name.endswith('.pyo'):
-                # always scan, even if we already know we're not safe
-                safe = scan_module(egg_dir, base, name, stubs) and safe
-    return safe
-
-def write_safety_flag(egg_dir, safe):
-    # Write or remove zip safety flag file(s)
-    for flag,fn in safety_flags.items():
-        fn = os.path.join(egg_dir, fn)
-        if os.path.exists(fn):
-            if safe is None or bool(safe)<>flag:
-                os.unlink(fn)
-        elif safe is not None and bool(safe)==flag:
-            f=open(fn,'wt'); f.write('\n'); f.close()
-
-safety_flags = {
-    True: 'zip-safe',
-    False: 'not-zip-safe',
-}
-
-def scan_module(egg_dir, base, name, stubs):
-    """Check whether module possibly uses unsafe-for-zipfile stuff"""
-
-    filename = os.path.join(base,name)
-    if filename[:-1] in stubs:
-        return True     # Extension module
-    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
-    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
-    if sys.version_info < (3, 3):
-        skip = 8   # skip magic & date
-    else:
-        skip = 12  # skip magic & date & file size
-    f = open(filename,'rb'); f.read(skip)
-    code = marshal.load(f); f.close()
-    safe = True
-    symbols = dict.fromkeys(iter_symbols(code))
-    for bad in ['__file__', '__path__']:
-        if bad in symbols:
-            log.warn("%s: module references %s", module, bad)
-            safe = False
-    if 'inspect' in symbols:
-        for bad in [
-            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
-            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
-            'getinnerframes', 'getouterframes', 'stack', 'trace'
-        ]:
-            if bad in symbols:
-                log.warn("%s: module MAY be using inspect.%s", module, bad)
-                safe = False
-    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
-        if sys.version[:3]=="2.4":  # -m works w/zipfiles in 2.5
-            log.warn("%s: top-level module may be 'python -m' script", module)
-            safe = False
-    return safe
-
-def iter_symbols(code):
-    """Yield names and strings used by `code` and its nested code objects"""
-    for name in code.co_names: yield name
-    for const in code.co_consts:
-        if isinstance(const,basestring):
-            yield const
-        elif isinstance(const,CodeType):
-            for name in iter_symbols(const):
-                yield name
-
-def can_scan():
-    if not sys.platform.startswith('java') and sys.platform != 'cli':
-        # CPython, PyPy, etc.
-        return True
-    log.warn("Unable to analyze compiled code on this platform.")
-    log.warn("Please ask the author to include a 'zip_safe'"
-             " setting (either True or False) in the package's setup.py")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# Attribute names of options for commands that might need to be convinced to
-# install to the egg build directory
-
-INSTALL_DIRECTORY_ATTRS = [
-    'install_lib', 'install_dir', 'install_data', 'install_base'
-]
-
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
-    mode='w'
-):
-    """Create a zip file from all the files under 'base_dir'.  The output
-    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
-    Python module (if available) or the InfoZIP "zip" utility (if installed
-    and found on the default search path).  If neither tool is available,
-    raises DistutilsExecError.  Returns the name of the output zip file.
-    """
-    import zipfile
-    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
-    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
-
-    def visit(z, dirname, names):
-        for name in names:
-            path = os.path.normpath(os.path.join(dirname, name))
-            if os.path.isfile(path):
-                p = path[len(base_dir)+1:]
-                if not dry_run:
-                    z.write(path, p)
-                log.debug("adding '%s'" % p)
-
-    if compress is None:
-        compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
-
-    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
-    if not dry_run:
-        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
-        for dirname, dirs, files in os.walk(base_dir):
-            visit(z, dirname, files)
-        z.close()
-    else:
-        for dirname, dirs, files in os.walk(base_dir):
-            visit(None, dirname, files)
-    return zip_filename
-#
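Typical invocations of the command above, for orientation (illustrative)::

    python setup.py bdist_egg                          # writes dist/<name>-<version>-pyX.Y[-plat].egg
    python setup.py bdist_egg --exclude-source-files   # remove .py files from the generated egg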
diff --git a/vendor/distribute-0.6.34/setuptools/command/bdist_rpm.py b/vendor/distribute-0.6.34/setuptools/command/bdist_rpm.py
deleted file mode 100644
index 8c48da35591037462d40d15adb96bdeb4351d30f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/bdist_rpm.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# This is just a kludge so that bdist_rpm doesn't guess wrong about the
-# distribution name and version, if the egg_info command is going to alter
-# them, another kludge to allow you to build old-style non-egg RPMs, and
-# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
-
-from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
-import sys, os
-
-class bdist_rpm(_bdist_rpm):
-
-    def initialize_options(self):
-        _bdist_rpm.initialize_options(self)
-        self.no_egg = None
-
-    if sys.version<"2.5":
-        # Track for uploading any .rpm file(s) moved to self.dist_dir
-        def move_file(self, src, dst, level=1):
-            _bdist_rpm.move_file(self, src, dst, level)
-            if dst==self.dist_dir and src.endswith('.rpm'):
-                getattr(self.distribution,'dist_files',[]).append(
-                    ('bdist_rpm',
-                    src.endswith('.src.rpm') and 'any' or sys.version[:3],
-                     os.path.join(dst, os.path.basename(src)))
-                )
-
-    def run(self):
-        self.run_command('egg_info')    # ensure distro name is up-to-date
-        _bdist_rpm.run(self)
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def _make_spec_file(self):
-        version = self.distribution.get_version()
-        rpmversion = version.replace('-','_')
-        spec = _bdist_rpm._make_spec_file(self)
-        line23 = '%define version '+version
-        line24 = '%define version '+rpmversion
-        spec  = [
-            line.replace(
-                "Source0: %{name}-%{version}.tar",
-                "Source0: %{name}-%{unmangled_version}.tar"
-            ).replace(
-                "setup.py install ",
-                "setup.py install --single-version-externally-managed "
-            ).replace(
-                "%setup",
-                "%setup -n %{name}-%{unmangled_version}"
-            ).replace(line23,line24)
-            for line in spec
-        ]
-        spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
-        return spec
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
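The spec-file rewriting in _make_spec_file above amounts to three line substitutions, roughly (illustrative, for a version of 1.0-1)::

    Source0: %{name}-%{version}.tar   ->  Source0: %{name}-%{unmangled_version}.tar
    setup.py install ...              ->  setup.py install --single-version-externally-managed ...
    %define version 1.0-1             ->  %define version 1.0_1
                                          %define unmangled_version 1.0-1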
diff --git a/vendor/distribute-0.6.34/setuptools/command/bdist_wininst.py b/vendor/distribute-0.6.34/setuptools/command/bdist_wininst.py
deleted file mode 100644
index 93e6846d79c92a461e0a84b0fd8c23fd5e9dfee3..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/bdist_wininst.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
-import os, sys
-
-class bdist_wininst(_bdist_wininst):
-
-    def create_exe(self, arcname, fullname, bitmap=None):
-        _bdist_wininst.create_exe(self, arcname, fullname, bitmap)
-        dist_files = getattr(self.distribution, 'dist_files', [])
-
-        if self.target_version:
-            installer_name = os.path.join(self.dist_dir,
-                                          "%s.win32-py%s.exe" %
-                                           (fullname, self.target_version))
-            pyversion = self.target_version
-
-            # fix 2.5 bdist_wininst ignoring --target-version spec
-            bad = ('bdist_wininst','any',installer_name)
-            if bad in dist_files:
-                dist_files.remove(bad)
-        else:
-            installer_name = os.path.join(self.dist_dir,
-                                          "%s.win32.exe" % fullname)
-            pyversion = 'any'
-        good = ('bdist_wininst', pyversion, installer_name)
-        if good not in dist_files:
-            dist_files.append(good)
-
-    def reinitialize_command (self, command, reinit_subcommands=0):
-        cmd = self.distribution.reinitialize_command(
-            command, reinit_subcommands)
-        if command in ('install', 'install_lib'):
-            cmd.install_lib = None  # work around distutils bug
-        return cmd
-
-    def run(self):
-        self._is_running = True
-        try:
-            _bdist_wininst.run(self)
-        finally:
-            self._is_running = False
-
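For orientation, the installer names that create_exe records in dist_files take the form (illustrative, for a project foo 1.0)::

    foo-1.0.win32-py2.7.exe   # when --target-version=2.7 is given
    foo-1.0.win32.exe         # otherwise, recorded with pyversion 'any'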
diff --git a/vendor/distribute-0.6.34/setuptools/command/build_ext.py b/vendor/distribute-0.6.34/setuptools/command/build_ext.py
deleted file mode 100644
index 4a94572cbd60333a7d4c8fbb1c34aa2961f552e3..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/build_ext.py
+++ /dev/null
@@ -1,294 +0,0 @@
-from distutils.command.build_ext import build_ext as _du_build_ext
-try:
-    # Attempt to use Pyrex for building extensions, if available
-    from Pyrex.Distutils.build_ext import build_ext as _build_ext
-except ImportError:
-    _build_ext = _du_build_ext
-
-import os, sys
-from distutils.file_util import copy_file
-from setuptools.extension import Library
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler, get_config_var
-get_config_var("LDSHARED")  # make sure _config_vars is initialized
-from distutils.sysconfig import _config_vars
-from distutils import log
-from distutils.errors import *
-
-have_rtld = False
-use_stubs = False
-libtype = 'shared'
-
-if sys.platform == "darwin":
-    use_stubs = True
-elif os.name != 'nt':
-    try:
-        from dl import RTLD_NOW
-        have_rtld = True
-        use_stubs = True
-    except ImportError:
-        pass
-
-def if_dl(s):
-    if have_rtld:
-        return s
-    return ''
-
-
-
-
-
-
-class build_ext(_build_ext):
-    def run(self):
-        """Build extensions in build directory, then copy if --inplace"""
-        old_inplace, self.inplace = self.inplace, 0
-        _build_ext.run(self)
-        self.inplace = old_inplace
-        if old_inplace:
-            self.copy_extensions_to_source()
-
-    def copy_extensions_to_source(self):
-        build_py = self.get_finalized_command('build_py')
-        for ext in self.extensions:
-            fullname = self.get_ext_fullname(ext.name)
-            filename = self.get_ext_filename(fullname)
-            modpath = fullname.split('.')
-            package = '.'.join(modpath[:-1])
-            package_dir = build_py.get_package_dir(package)
-            dest_filename = os.path.join(package_dir,os.path.basename(filename))
-            src_filename = os.path.join(self.build_lib,filename)
-
-            # Always copy, even if source is older than destination, to ensure
-            # that the right extensions for the current Python/platform are
-            # used.
-            copy_file(
-                src_filename, dest_filename, verbose=self.verbose,
-                dry_run=self.dry_run
-            )
-            if ext._needs_stub:
-                self.write_stub(package_dir or os.curdir, ext, True)
-
-
-    if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
-        # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
-        def swig_sources(self, sources, *otherargs):
-            # first do any Pyrex processing
-            sources = _build_ext.swig_sources(self, sources) or sources
-            # Then do any actual SWIG stuff on the remainder
-            return _du_build_ext.swig_sources(self, sources, *otherargs)
-
-
-
-    def get_ext_filename(self, fullname):
-        filename = _build_ext.get_ext_filename(self,fullname)
-        if fullname not in self.ext_map:
-            return filename
-        ext = self.ext_map[fullname]
-        if isinstance(ext,Library):
-            fn, ext = os.path.splitext(filename)
-            return self.shlib_compiler.library_filename(fn,libtype)
-        elif use_stubs and ext._links_to_dynamic:
-            d,fn = os.path.split(filename)
-            return os.path.join(d,'dl-'+fn)
-        else:
-            return filename
-
-    def initialize_options(self):
-        _build_ext.initialize_options(self)
-        self.shlib_compiler = None
-        self.shlibs = []
-        self.ext_map = {}
-
-    def finalize_options(self):
-        _build_ext.finalize_options(self)
-        self.extensions = self.extensions or []
-        self.check_extensions_list(self.extensions)
-        self.shlibs = [ext for ext in self.extensions
-                        if isinstance(ext,Library)]
-        if self.shlibs:
-            self.setup_shlib_compiler()
-        for ext in self.extensions:
-            ext._full_name = self.get_ext_fullname(ext.name)
-        for ext in self.extensions:
-            fullname = ext._full_name
-            self.ext_map[fullname] = ext
-
-            # distutils 3.1 will also ask for module names
-            # XXX what to do with conflicts?
-            self.ext_map[fullname.split('.')[-1]] = ext
-
-            ltd = ext._links_to_dynamic = \
-                self.shlibs and self.links_to_dynamic(ext) or False
-            ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
-            filename = ext._file_name = self.get_ext_filename(fullname)
-            libdir = os.path.dirname(os.path.join(self.build_lib,filename))
-            if ltd and libdir not in ext.library_dirs:
-                ext.library_dirs.append(libdir)
-            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
-                ext.runtime_library_dirs.append(os.curdir)
-
-    def setup_shlib_compiler(self):
-        compiler = self.shlib_compiler = new_compiler(
-            compiler=self.compiler, dry_run=self.dry_run, force=self.force
-        )
-        if sys.platform == "darwin":
-            tmp = _config_vars.copy()
-            try:
-                # XXX Help!  I don't have any idea whether these are right...
-                _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
-                _config_vars['CCSHARED'] = " -dynamiclib"
-                _config_vars['SO'] = ".dylib"
-                customize_compiler(compiler)
-            finally:
-                _config_vars.clear()
-                _config_vars.update(tmp)
-        else:
-            customize_compiler(compiler)
-
-        if self.include_dirs is not None:
-            compiler.set_include_dirs(self.include_dirs)
-        if self.define is not None:
-            # 'define' option is a list of (name,value) tuples
-            for (name,value) in self.define:
-                compiler.define_macro(name, value)
-        if self.undef is not None:
-            for macro in self.undef:
-                compiler.undefine_macro(macro)
-        if self.libraries is not None:
-            compiler.set_libraries(self.libraries)
-        if self.library_dirs is not None:
-            compiler.set_library_dirs(self.library_dirs)
-        if self.rpath is not None:
-            compiler.set_runtime_library_dirs(self.rpath)
-        if self.link_objects is not None:
-            compiler.set_link_objects(self.link_objects)
-
-        # hack so distutils' build_extension() builds a library instead
-        compiler.link_shared_object = link_shared_object.__get__(compiler)
-
-
-
-    def get_export_symbols(self, ext):
-        if isinstance(ext,Library):
-            return ext.export_symbols
-        return _build_ext.get_export_symbols(self,ext)
-
-    def build_extension(self, ext):
-        _compiler = self.compiler
-        try:
-            if isinstance(ext,Library):
-                self.compiler = self.shlib_compiler
-            _build_ext.build_extension(self,ext)
-            if ext._needs_stub:
-                self.write_stub(
-                    self.get_finalized_command('build_py').build_lib, ext
-                )
-        finally:
-            self.compiler = _compiler
-
-    def links_to_dynamic(self, ext):
-        """Return true if 'ext' links to a dynamic lib in the same package"""
-        # XXX this should check to ensure the lib is actually being built
-        # XXX as dynamic, and not just using a locally-found version or a
-        # XXX static-compiled version
-        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
-        pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
-        for libname in ext.libraries:
-            if pkg+libname in libnames: return True
-        return False
-
-    def get_outputs(self):
-        outputs = _build_ext.get_outputs(self)
-        optimize = self.get_finalized_command('build_py').optimize
-        for ext in self.extensions:
-            if ext._needs_stub:
-                base = os.path.join(self.build_lib, *ext._full_name.split('.'))
-                outputs.append(base+'.py')
-                outputs.append(base+'.pyc')
-                if optimize:
-                    outputs.append(base+'.pyo')
-        return outputs
-
-    def write_stub(self, output_dir, ext, compile=False):
-        log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
-        stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
-        if compile and os.path.exists(stub_file):
-            raise DistutilsError(stub_file+" already exists! Please delete.")
-        if not self.dry_run:
-            f = open(stub_file,'w')
-            f.write('\n'.join([
-                "def __bootstrap__():",
-                "   global __bootstrap__, __file__, __loader__",
-                "   import sys, os, pkg_resources, imp"+if_dl(", dl"),
-                "   __file__ = pkg_resources.resource_filename(__name__,%r)"
-                   % os.path.basename(ext._file_name),
-                "   del __bootstrap__",
-                "   if '__loader__' in globals():",
-                "       del __loader__",
-                if_dl("   old_flags = sys.getdlopenflags()"),
-                "   old_dir = os.getcwd()",
-                "   try:",
-                "     os.chdir(os.path.dirname(__file__))",
-                if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
-                "     imp.load_dynamic(__name__,__file__)",
-                "   finally:",
-                if_dl("     sys.setdlopenflags(old_flags)"),
-                "     os.chdir(old_dir)",
-                "__bootstrap__()",
-                "" # terminal \n
-            ]))
-            f.close()
-        if compile:
-            from distutils.util import byte_compile
-            byte_compile([stub_file], optimize=0,
-                         force=True, dry_run=self.dry_run)
-            optimize = self.get_finalized_command('install_lib').optimize
-            if optimize > 0:
-                byte_compile([stub_file], optimize=optimize,
-                             force=True, dry_run=self.dry_run)
-            if os.path.exists(stub_file) and not self.dry_run:
-                os.unlink(stub_file)
-
-
-if use_stubs or os.name=='nt':
-    # Build shared libraries
-    #
-    def link_shared_object(self, objects, output_libname, output_dir=None,
-        libraries=None, library_dirs=None, runtime_library_dirs=None,
-        export_symbols=None, debug=0, extra_preargs=None,
-        extra_postargs=None, build_temp=None, target_lang=None
-    ):  self.link(
-            self.SHARED_LIBRARY, objects, output_libname,
-            output_dir, libraries, library_dirs, runtime_library_dirs,
-            export_symbols, debug, extra_preargs, extra_postargs,
-            build_temp, target_lang
-        )
-else:
-    # Build static libraries everywhere else
-    libtype = 'static'
-
-    def link_shared_object(self, objects, output_libname, output_dir=None,
-        libraries=None, library_dirs=None, runtime_library_dirs=None,
-        export_symbols=None, debug=0, extra_preargs=None,
-        extra_postargs=None, build_temp=None, target_lang=None
-    ):
-        # XXX we need to either disallow these attrs on Library instances,
-        #     or warn/abort here if set, or something...
-        #libraries=None, library_dirs=None, runtime_library_dirs=None,
-        #export_symbols=None, extra_preargs=None, extra_postargs=None,
-        #build_temp=None
-
-        assert output_dir is None   # distutils build_ext doesn't pass this
-        output_dir,filename = os.path.split(output_libname)
-        basename, ext = os.path.splitext(filename)
-        if self.library_filename("x").startswith('lib'):
-            # strip 'lib' prefix; this is kludgy if some platform uses
-            # a different prefix
-            basename = basename[3:]
-
-        self.create_static_lib(
-            objects, basename, output_dir, debug, target_lang
-        )
-
-
diff --git a/vendor/distribute-0.6.34/setuptools/command/build_py.py b/vendor/distribute-0.6.34/setuptools/command/build_py.py
deleted file mode 100644
index 8751acd493cf5c3c45022f29940ee2beda3bb6cd..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/build_py.py
+++ /dev/null
@@ -1,280 +0,0 @@
-import os.path, sys, fnmatch
-from distutils.command.build_py import build_py as _build_py
-from distutils.util import convert_path
-from glob import glob
-
-try:
-    from distutils.util import Mixin2to3 as _Mixin2to3
-    # add support for converting doctests that is missing in 3.1 distutils
-    from distutils import log
-    from lib2to3.refactor import RefactoringTool, get_fixers_from_package
-    import setuptools
-    class DistutilsRefactoringTool(RefactoringTool):
-        def log_error(self, msg, *args, **kw):
-            log.error(msg, *args)
-
-        def log_message(self, msg, *args):
-            log.info(msg, *args)
-
-        def log_debug(self, msg, *args):
-            log.debug(msg, *args)
-
-    class Mixin2to3(_Mixin2to3):
-        def run_2to3(self, files, doctests = False):
-            # See if the distribution option has been set, otherwise check the
-            # setuptools default.
-            if self.distribution.use_2to3 is not True:
-                return
-            if not files:
-                return
-            log.info("Fixing "+" ".join(files))
-            self.__build_fixer_names()
-            self.__exclude_fixers()
-            if doctests:
-                if setuptools.run_2to3_on_doctests:
-                    r = DistutilsRefactoringTool(self.fixer_names)
-                    r.refactor(files, write=True, doctests_only=True)
-            else:
-                _Mixin2to3.run_2to3(self, files)
-
-        def __build_fixer_names(self):
-            if self.fixer_names: return
-            self.fixer_names = []
-            for p in setuptools.lib2to3_fixer_packages:
-                self.fixer_names.extend(get_fixers_from_package(p))
-            if self.distribution.use_2to3_fixers is not None:
-                for p in self.distribution.use_2to3_fixers:
-                    self.fixer_names.extend(get_fixers_from_package(p))
-
-        def __exclude_fixers(self):
-            excluded_fixers = getattr(self, 'exclude_fixers', [])
-            if self.distribution.use_2to3_exclude_fixers is not None:
-                excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
-            for fixer_name in excluded_fixers:
-                if fixer_name in self.fixer_names:
-                    self.fixer_names.remove(fixer_name)
-
-except ImportError:
-    class Mixin2to3:
-        def run_2to3(self, files, doctests=True):
-            # Nothing done in 2.x
-            pass
-
-class build_py(_build_py, Mixin2to3):
-    """Enhanced 'build_py' command that includes data files with packages
-
-    The data files are specified via a 'package_data' argument to 'setup()'.
-    See 'setuptools.dist.Distribution' for more details.
-
-    Also, this version of the 'build_py' command allows you to specify both
-    'py_modules' and 'packages' in the same setup operation.
-    """
-    def finalize_options(self):
-        _build_py.finalize_options(self)
-        self.package_data = self.distribution.package_data
-        self.exclude_package_data = self.distribution.exclude_package_data or {}
-        if 'data_files' in self.__dict__: del self.__dict__['data_files']
-        self.__updated_files = []
-        self.__doctests_2to3 = []
-
-    def run(self):
-        """Build modules, packages, and copy data files to build directory"""
-        if not self.py_modules and not self.packages:
-            return
-
-        if self.py_modules:
-            self.build_modules()
-
-        if self.packages:
-            self.build_packages()
-            self.build_package_data()
-
-        self.run_2to3(self.__updated_files, False)
-        self.run_2to3(self.__updated_files, True)
-        self.run_2to3(self.__doctests_2to3, True)
-
-        # Only compile actual .py files, using our base class' idea of what our
-        # output files are.
-        self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
-
-    def __getattr__(self,attr):
-        if attr=='data_files':  # lazily compute data files
-            self.data_files = files = self._get_data_files(); return files
-        return _build_py.__getattr__(self,attr)
-
-    def build_module(self, module, module_file, package):
-        outfile, copied = _build_py.build_module(self, module, module_file, package)
-        if copied:
-            self.__updated_files.append(outfile)
-        return outfile, copied
-
-    def _get_data_files(self):
-        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
-        self.analyze_manifest()
-        data = []
-        for package in self.packages or ():
-            # Locate package source directory
-            src_dir = self.get_package_dir(package)
-
-            # Compute package build directory
-            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
-
-            # Length of path to strip from found files
-            plen = len(src_dir)+1
-
-            # Strip directory from globbed filenames
-            filenames = [
-                file[plen:] for file in self.find_data_files(package, src_dir)
-                ]
-            data.append( (package, src_dir, build_dir, filenames) )
-        return data
-
-    def find_data_files(self, package, src_dir):
-        """Return filenames for package's data files in 'src_dir'"""
-        globs = (self.package_data.get('', [])
-                 + self.package_data.get(package, []))
-        files = self.manifest_files.get(package, [])[:]
-        for pattern in globs:
-            # Each pattern has to be converted to a platform-specific path
-            files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
-        return self.exclude_data_files(package, src_dir, files)
-
-    def build_package_data(self):
-        """Copy data files into build directory"""
-        lastdir = None
-        for package, src_dir, build_dir, filenames in self.data_files:
-            for filename in filenames:
-                target = os.path.join(build_dir, filename)
-                self.mkpath(os.path.dirname(target))
-                srcfile = os.path.join(src_dir, filename)
-                outf, copied = self.copy_file(srcfile, target)
-                srcfile = os.path.abspath(srcfile)
-                if copied and srcfile in self.distribution.convert_2to3_doctests:
-                    self.__doctests_2to3.append(outf)
-
-
-    def analyze_manifest(self):
-        self.manifest_files = mf = {}
-        if not self.distribution.include_package_data:
-            return
-        src_dirs = {}
-        for package in self.packages or ():
-            # Locate package source directory
-            src_dirs[assert_relative(self.get_package_dir(package))] = package
-
-        self.run_command('egg_info')
-        ei_cmd = self.get_finalized_command('egg_info')
-        for path in ei_cmd.filelist.files:
-            d,f = os.path.split(assert_relative(path))
-            prev = None
-            oldf = f
-            while d and d!=prev and d not in src_dirs:
-                prev = d
-                d, df = os.path.split(d)
-                f = os.path.join(df, f)
-            if d in src_dirs:
-                if path.endswith('.py') and f==oldf:
-                    continue    # it's a module, not data
-                mf.setdefault(src_dirs[d],[]).append(path)
-
-    def get_data_files(self): pass  # kludge 2.4 for lazy computation
-
-    if sys.version<"2.4":    # Python 2.4 already has this code
-        def get_outputs(self, include_bytecode=1):
-            """Return complete list of files copied to the build directory
-
-            This includes both '.py' files and data files, as well as '.pyc'
-            and '.pyo' files if 'include_bytecode' is true.  (This method is
-            needed for the 'install_lib' command to do its job properly, and to
-            generate a correct installation manifest.)
-            """
-            return _build_py.get_outputs(self, include_bytecode) + [
-                os.path.join(build_dir, filename)
-                for package, src_dir, build_dir,filenames in self.data_files
-                for filename in filenames
-                ]
-
-    def check_package(self, package, package_dir):
-        """Check namespace packages' __init__ for declare_namespace"""
-        try:
-            return self.packages_checked[package]
-        except KeyError:
-            pass
-
-        init_py = _build_py.check_package(self, package, package_dir)
-        self.packages_checked[package] = init_py
-
-        if not init_py or not self.distribution.namespace_packages:
-            return init_py
-
-        for pkg in self.distribution.namespace_packages:
-            if pkg==package or pkg.startswith(package+'.'):
-                break
-        else:
-            return init_py
-
-        f = open(init_py,'rbU')
-        if 'declare_namespace'.encode() not in f.read():
-            from distutils import log
-            log.warn(
-               "WARNING: %s is a namespace package, but its __init__.py does\n"
-               "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
-               '(See the setuptools manual under "Namespace Packages" for '
-               "details.)\n", package
-            )
-        f.close()
-        return init_py
-
-    def initialize_options(self):
-        self.packages_checked={}
-        _build_py.initialize_options(self)
-
-
-    def get_package_dir(self, package):
-        res = _build_py.get_package_dir(self, package)
-        if self.distribution.src_root is not None:
-            return os.path.join(self.distribution.src_root, res)
-        return res
-
-
-    def exclude_data_files(self, package, src_dir, files):
-        """Filter filenames for package's data files in 'src_dir'"""
-        globs = (self.exclude_package_data.get('', [])
-                 + self.exclude_package_data.get(package, []))
-        bad = []
-        for pattern in globs:
-            bad.extend(
-                fnmatch.filter(
-                    files, os.path.join(src_dir, convert_path(pattern))
-                )
-            )
-        bad = dict.fromkeys(bad)
-        seen = {}
-        return [
-            f for f in files if f not in bad
-                and f not in seen and seen.setdefault(f,1)  # ditch dupes
-        ]
-
-
-def assert_relative(path):
-    if not os.path.isabs(path):
-        return path
-    from distutils.errors import DistutilsSetupError
-    raise DistutilsSetupError(
-"""Error: setup script specifies an absolute path:
-
-    %s
-
-setup() arguments must *always* be /-separated paths relative to the
-setup.py directory, *never* absolute paths.
-""" % path
-    )
-
-
-
-
-
-
-
-
-
diff --git a/vendor/distribute-0.6.34/setuptools/command/develop.py b/vendor/distribute-0.6.34/setuptools/command/develop.py
deleted file mode 100644
index 1d500040d0da40ea372da58a3af19b11137e8598..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/develop.py
+++ /dev/null
@@ -1,167 +0,0 @@
-from setuptools.command.easy_install import easy_install
-from distutils.util import convert_path, subst_vars
-from pkg_resources import Distribution, PathMetadata, normalize_path
-from distutils import log
-from distutils.errors import DistutilsError, DistutilsOptionError
-import os, sys, setuptools, glob
-
-class develop(easy_install):
-    """Set up package for development"""
-
-    description = "install package in 'development mode'"
-
-    user_options = easy_install.user_options + [
-        ("uninstall", "u", "Uninstall this source package"),
-        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
-    ]
-
-    boolean_options = easy_install.boolean_options + ['uninstall']
-
-    command_consumes_arguments = False  # override base
-
-    def run(self):
-        if self.uninstall:
-            self.multi_version = True
-            self.uninstall_link()
-        else:
-            self.install_for_development()
-        self.warn_deprecated_options()
-
-    def initialize_options(self):
-        self.uninstall = None
-        self.egg_path = None
-        easy_install.initialize_options(self)
-        self.setup_path = None
-        self.always_copy_from = '.'   # always copy eggs installed in curdir
-
-
-
-    def finalize_options(self):
-        ei = self.get_finalized_command("egg_info")
-        if ei.broken_egg_info:
-            raise DistutilsError(
-            "Please rename %r to %r before using 'develop'"
-            % (ei.egg_info, ei.broken_egg_info)
-            )
-        self.args = [ei.egg_name]
-
-
-
-
-        easy_install.finalize_options(self)
-        self.expand_basedirs()
-        self.expand_dirs()
-        # pick up setup-dir .egg files only: no .egg-info
-        self.package_index.scan(glob.glob('*.egg'))
-
-        self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
-        self.egg_base = ei.egg_base
-        if self.egg_path is None:
-            self.egg_path = os.path.abspath(ei.egg_base)
-
-        target = normalize_path(self.egg_base)
-        if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
-            raise DistutilsOptionError(
-                "--egg-path must be a relative path from the install"
-                " directory to "+target
-        )
-
-        # Make a distribution for the package's source
-        self.dist = Distribution(
-            target,
-            PathMetadata(target, os.path.abspath(ei.egg_info)),
-            project_name = ei.egg_name
-        )
-
-        p = self.egg_base.replace(os.sep,'/')
-        if p!= os.curdir:
-            p = '../' * (p.count('/')+1)
-        self.setup_path = p
-        p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
-        if  p != normalize_path(os.curdir):
-            raise DistutilsOptionError(
-                "Can't get a consistent path to setup script from"
-                " installation directory", p, normalize_path(os.curdir))
-
-    def install_for_development(self):
-        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
-            # If we run 2to3 we can not do this inplace:
-
-            # Ensure metadata is up-to-date
-            self.reinitialize_command('build_py', inplace=0)
-            self.run_command('build_py')
-            bpy_cmd = self.get_finalized_command("build_py")
-            build_path = normalize_path(bpy_cmd.build_lib)
-
-            # Build extensions
-            self.reinitialize_command('egg_info', egg_base=build_path)
-            self.run_command('egg_info')
-
-            self.reinitialize_command('build_ext', inplace=0)
-            self.run_command('build_ext')
-            
-            # Fixup egg-link and easy-install.pth
-            ei_cmd = self.get_finalized_command("egg_info")
-            self.egg_path = build_path
-            self.dist.location = build_path
-            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)    # XXX
-        else:
-            # Without 2to3 inplace works fine:
-            self.run_command('egg_info')
-
-            # Build extensions in-place
-            self.reinitialize_command('build_ext', inplace=1)
-            self.run_command('build_ext')
-        
-        self.install_site_py()  # ensure that target dir is site-safe
-        if setuptools.bootstrap_install_from:
-            self.easy_install(setuptools.bootstrap_install_from)
-            setuptools.bootstrap_install_from = None
-
-        # create an .egg-link in the installation dir, pointing to our egg
-        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
-        if not self.dry_run:
-            f = open(self.egg_link,"w")
-            f.write(self.egg_path + "\n" + self.setup_path)
-            f.close()
-        # postprocess the installed distro, fixing up .pth, installing scripts,
-        # and handling requirements
-        self.process_distribution(None, self.dist, not self.no_deps)
-
-
-    def uninstall_link(self):
-        if os.path.exists(self.egg_link):
-            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
-            egg_link_file = open(self.egg_link)
-            contents = [line.rstrip() for line in egg_link_file]
-            egg_link_file.close()
-            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
-                log.warn("Link points to %s: uninstall aborted", contents)
-                return
-            if not self.dry_run:
-                os.unlink(self.egg_link)
-        if not self.dry_run:
-            self.update_pth(self.dist)  # remove any .pth link to us
-        if self.distribution.scripts:
-            # XXX should also check for entry point scripts!
-            log.warn("Note: you must uninstall or replace scripts manually!")
-
-    def install_egg_scripts(self, dist):
-        if dist is not self.dist:
-            # Installing a dependency, so fall back to normal behavior
-            return easy_install.install_egg_scripts(self,dist)
-
-        # create wrapper scripts in the script dir, pointing to dist.scripts
-
-        # new-style...
-        self.install_wrapper_scripts(dist)
-
-        # ...and old-style
-        for script_name in self.distribution.scripts or []:
-            script_path = os.path.abspath(convert_path(script_name))
-            script_name = os.path.basename(script_path)
-            f = open(script_path,'rU')
-            script_text = f.read()
-            f.close()
-            self.install_script(dist, script_name, script_text, script_path)
-
diff --git a/vendor/distribute-0.6.34/setuptools/command/easy_install.py b/vendor/distribute-0.6.34/setuptools/command/easy_install.py
deleted file mode 100644
index 0d72f75843c999f14664c1fed34458f530638ca9..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/easy_install.py
+++ /dev/null
@@ -1,1947 +0,0 @@
-#!python
-"""\
-Easy Install
-------------
-
-A tool for doing automatic download/extract/build of distutils-based Python
-packages.  For detailed documentation, see the accompanying EasyInstall.txt
-file, or visit the `EasyInstall home page`__.
-
-__ http://packages.python.org/distribute/easy_install.html
-
-"""
-import sys
-import os
-import zipimport
-import shutil
-import tempfile
-import zipfile
-import re
-import stat
-import random
-from glob import glob
-from setuptools import Command, _dont_write_bytecode
-from setuptools.sandbox import run_setup
-from distutils import log, dir_util
-from distutils.util import get_platform
-from distutils.util import convert_path, subst_vars
-from distutils.sysconfig import get_python_lib, get_config_vars
-from distutils.errors import DistutilsArgError, DistutilsOptionError, \
-    DistutilsError, DistutilsPlatformError
-from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
-from setuptools.command import setopt
-from setuptools.archive_util import unpack_archive
-from setuptools.package_index import PackageIndex
-from setuptools.package_index import URL_SCHEME
-from setuptools.command import bdist_egg, egg_info
-from pkg_resources import yield_lines, normalize_path, resource_string, \
-        ensure_directory, get_distribution, find_distributions, \
-        Environment, Requirement, Distribution, \
-        PathMetadata, EggMetadata, WorkingSet, \
-         DistributionNotFound, VersionConflict, \
-        DEVELOP_DIST
-
-sys_executable = os.path.normpath(sys.executable)
-
-__all__ = [
-    'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
-    'main', 'get_exe_prefixes',
-]
-
-import site
-HAS_USER_SITE = not sys.version < "2.6" and site.ENABLE_USER_SITE
-
-import struct
-def is_64bit():
-    return struct.calcsize("P") == 8
-
-def samefile(p1,p2):
-    if hasattr(os.path,'samefile') and (
-        os.path.exists(p1) and os.path.exists(p2)
-    ):
-        return os.path.samefile(p1,p2)
-    return (
-        os.path.normpath(os.path.normcase(p1)) ==
-        os.path.normpath(os.path.normcase(p2))
-    )
-
-if sys.version_info <= (3,):
-    def _to_ascii(s):
-        return s
-    def isascii(s):
-        try:
-            unicode(s, 'ascii')
-            return True
-        except UnicodeError:
-            return False
-else:
-    def _to_ascii(s):
-        return s.encode('ascii')
-    def isascii(s):
-        try:
-            s.encode('ascii')
-            return True
-        except UnicodeError:
-            return False
-
-class easy_install(Command):
-    """Manage a download/build/install process"""
-    description = "Find/get/install Python packages"
-    command_consumes_arguments = True
-
-    user_options = [
-        ('prefix=', None, "installation prefix"),
-        ("zip-ok", "z", "install package as a zipfile"),
-        ("multi-version", "m", "make apps have to require() a version"),
-        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
-        ("install-dir=", "d", "install package to DIR"),
-        ("script-dir=", "s", "install scripts to DIR"),
-        ("exclude-scripts", "x", "Don't install scripts"),
-        ("always-copy", "a", "Copy all needed packages to install dir"),
-        ("index-url=", "i", "base URL of Python Package Index"),
-        ("find-links=", "f", "additional URL(s) to search for packages"),
-        ("delete-conflicting", "D", "no longer needed; don't use this"),
-        ("ignore-conflicts-at-my-risk", None,
-            "no longer needed; don't use this"),
-        ("build-directory=", "b",
-            "download/extract/build in DIR; keep the results"),
-        ('optimize=', 'O',
-         "also compile with optimization: -O1 for \"python -O\", "
-         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
-        ('record=', None,
-         "filename in which to record list of installed files"),
-        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
-        ('site-dirs=','S',"list of directories where .pth files work"),
-        ('editable', 'e', "Install specified packages in editable form"),
-        ('no-deps', 'N', "don't install dependencies"),
-        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
-        ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
-        ('version', None, "print version information and exit"),
-        ('no-find-links', None,
-         "Don't load find-links defined in packages being installed")
-    ]
-    boolean_options = [
-        'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
-        'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable',
-        'no-deps', 'local-snapshots-ok', 'version'
-    ]
-
-    if HAS_USER_SITE:
-        user_options.append(('user', None,
-                             "install in user site-package '%s'" % site.USER_SITE))
-        boolean_options.append('user')
-
-
-    negative_opt = {'always-unzip': 'zip-ok'}
-    create_index = PackageIndex
-
-    def initialize_options(self):
-        if HAS_USER_SITE:
-            whereami = os.path.abspath(__file__)
-            self.user = whereami.startswith(site.USER_SITE)
-        else:
-            self.user = 0
-
-        self.zip_ok = self.local_snapshots_ok = None
-        self.install_dir = self.script_dir = self.exclude_scripts = None
-        self.index_url = None
-        self.find_links = None
-        self.build_directory = None
-        self.args = None
-        self.optimize = self.record = None
-        self.upgrade = self.always_copy = self.multi_version = None
-        self.editable = self.no_deps = self.allow_hosts = None
-        self.root = self.prefix = self.no_report = None
-        self.version = None
-        self.install_purelib = None     # for pure module distributions
-        self.install_platlib = None     # non-pure (dists w/ extensions)
-        self.install_headers = None     # for C/C++ headers
-        self.install_lib = None         # set to either purelib or platlib
-        self.install_scripts = None
-        self.install_data = None
-        self.install_base = None
-        self.install_platbase = None
-        if HAS_USER_SITE:
-            self.install_userbase = site.USER_BASE
-            self.install_usersite = site.USER_SITE
-        else:
-            self.install_userbase = None
-            self.install_usersite = None
-        self.no_find_links = None
-
-        # Options not specifiable via command line
-        self.package_index = None
-        self.pth_file = self.always_copy_from = None
-        self.delete_conflicting = None
-        self.ignore_conflicts_at_my_risk = None
-        self.site_dirs = None
-        self.installed_projects = {}
-        self.sitepy_installed = False
-        # Always read easy_install options, even if we are subclassed, or have
-        # an independent instance created.  This ensures that defaults will
-        # always come from the standard configuration file(s)' "easy_install"
-        # section, even if this is a "develop" or "install" command, or some
-        # other embedding.
-        self._dry_run = None
-        self.verbose = self.distribution.verbose
-        self.distribution._set_command_options(
-            self, self.distribution.get_option_dict('easy_install')
-        )
-
-    def delete_blockers(self, blockers):
-        for filename in blockers:
-            if os.path.exists(filename) or os.path.islink(filename):
-                log.info("Deleting %s", filename)
-                if not self.dry_run:
-                    if os.path.isdir(filename) and not os.path.islink(filename):
-                        rmtree(filename)
-                    else:
-                        os.unlink(filename)
-
-    def finalize_options(self):
-        if self.version:
-            print 'distribute %s' % get_distribution('distribute').version
-            sys.exit()
-
-        py_version = sys.version.split()[0]
-        prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
-
-        self.config_vars = {'dist_name': self.distribution.get_name(),
-                            'dist_version': self.distribution.get_version(),
-                            'dist_fullname': self.distribution.get_fullname(),
-                            'py_version': py_version,
-                            'py_version_short': py_version[0:3],
-                            'py_version_nodot': py_version[0] + py_version[2],
-                            'sys_prefix': prefix,
-                            'prefix': prefix,
-                            'sys_exec_prefix': exec_prefix,
-                            'exec_prefix': exec_prefix,
-                            # Only python 3.2+ has abiflags
-                            'abiflags': getattr(sys, 'abiflags', ''),
-                           }
-
-        if HAS_USER_SITE:
-            self.config_vars['userbase'] = self.install_userbase
-            self.config_vars['usersite'] = self.install_usersite
-
-        # fix the install_dir if "--user" was used
-        #XXX: duplicate of the code in the setup command
-        if self.user and HAS_USER_SITE:
-            self.create_home_path()
-            if self.install_userbase is None:
-                raise DistutilsPlatformError(
-                    "User base directory is not specified")
-            self.install_base = self.install_platbase = self.install_userbase
-            if os.name == 'posix':
-                self.select_scheme("unix_user")
-            else:
-                self.select_scheme(os.name + "_user")
-
-        self.expand_basedirs()
-        self.expand_dirs()
-
-        self._expand('install_dir','script_dir','build_directory','site_dirs')
-        # If a non-default installation directory was specified, default the
-        # script directory to match it.
-        if self.script_dir is None:
-            self.script_dir = self.install_dir
-
-        if self.no_find_links is None:
-            self.no_find_links = False
-
-        # Let install_dir get set by install_lib command, which in turn
-        # gets its info from the install command, and takes into account
-        # --prefix and --home and all that other crud.
-        self.set_undefined_options('install_lib',
-            ('install_dir','install_dir')
-        )
-        # Likewise, set default script_dir from 'install_scripts.install_dir'
-        self.set_undefined_options('install_scripts',
-            ('install_dir', 'script_dir')
-        )
-
-        if self.user and self.install_purelib:
-            self.install_dir = self.install_purelib
-            self.script_dir = self.install_scripts
-        # default --record from the install command
-        self.set_undefined_options('install', ('record', 'record'))
-        normpath = map(normalize_path, sys.path)
-        self.all_site_dirs = get_site_dirs()
-        if self.site_dirs is not None:
-            site_dirs = [
-                os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
-            ]
-            for d in site_dirs:
-                if not os.path.isdir(d):
-                    log.warn("%s (in --site-dirs) does not exist", d)
-                elif normalize_path(d) not in normpath:
-                    raise DistutilsOptionError(
-                        d+" (in --site-dirs) is not on sys.path"
-                    )
-                else:
-                    self.all_site_dirs.append(normalize_path(d))
-        if not self.editable: self.check_site_dir()
-        self.index_url = self.index_url or "http://pypi.python.org/simple"
-        self.shadow_path = self.all_site_dirs[:]
-        for path_item in self.install_dir, normalize_path(self.script_dir):
-            if path_item not in self.shadow_path:
-                self.shadow_path.insert(0, path_item)
-
-        if self.allow_hosts is not None:
-            hosts = [s.strip() for s in self.allow_hosts.split(',')]
-        else:
-            hosts = ['*']
-        if self.package_index is None:
-            self.package_index = self.create_index(
-                self.index_url, search_path = self.shadow_path, hosts=hosts,
-            )
-        self.local_index = Environment(self.shadow_path+sys.path)
-
-        if self.find_links is not None:
-            if isinstance(self.find_links, basestring):
-                self.find_links = self.find_links.split()
-        else:
-            self.find_links = []
-        if self.local_snapshots_ok:
-            self.package_index.scan_egg_links(self.shadow_path+sys.path)
-        if not self.no_find_links:
-            self.package_index.add_find_links(self.find_links)
-        self.set_undefined_options('install_lib', ('optimize','optimize'))
-        if not isinstance(self.optimize,int):
-            try:
-                self.optimize = int(self.optimize)
-                if not (0 <= self.optimize <= 2): raise ValueError
-            except ValueError:
-                raise DistutilsOptionError("--optimize must be 0, 1, or 2")
-
-        if self.delete_conflicting and self.ignore_conflicts_at_my_risk:
-            raise DistutilsOptionError(
-                "Can't use both --delete-conflicting and "
-                "--ignore-conflicts-at-my-risk at the same time"
-            )
-        if self.editable and not self.build_directory:
-            raise DistutilsArgError(
-                "Must specify a build directory (-b) when using --editable"
-            )
-        if not self.args:
-            raise DistutilsArgError(
-                "No urls, filenames, or requirements specified (see --help)")
-
-        self.outputs = []
-
-
-    def _expand_attrs(self, attrs):
-        for attr in attrs:
-            val = getattr(self, attr)
-            if val is not None:
-                if os.name == 'posix' or os.name == 'nt':
-                    val = os.path.expanduser(val)
-                val = subst_vars(val, self.config_vars)
-                setattr(self, attr, val)
-
-    def expand_basedirs(self):
-        """Calls `os.path.expanduser` on install_base, install_platbase and
-        root."""
-        self._expand_attrs(['install_base', 'install_platbase', 'root'])
-
-    def expand_dirs(self):
-        """Calls `os.path.expanduser` on install dirs."""
-        self._expand_attrs(['install_purelib', 'install_platlib',
-                            'install_lib', 'install_headers',
-                            'install_scripts', 'install_data',])
-
-    def run(self):
-        if self.verbose != self.distribution.verbose:
-            log.set_verbosity(self.verbose)
-        try:
-            for spec in self.args:
-                self.easy_install(spec, not self.no_deps)
-            if self.record:
-                outputs = self.outputs
-                if self.root:               # strip any package prefix
-                    root_len = len(self.root)
-                    for counter in xrange(len(outputs)):
-                        outputs[counter] = outputs[counter][root_len:]
-                from distutils import file_util
-                self.execute(
-                    file_util.write_file, (self.record, outputs),
-                    "writing list of installed files to '%s'" %
-                    self.record
-                )
-            self.warn_deprecated_options()
-        finally:
-            log.set_verbosity(self.distribution.verbose)
-
-    def pseudo_tempname(self):
-        """Return a pseudo-tempname base in the install directory.
-        This code is intentionally naive; if a malicious party can write to
-        the target directory you're already in deep doodoo.
-        """
-        try:
-            pid = os.getpid()
-        except:
-            pid = random.randint(0,sys.maxint)
-        return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
-
-    def warn_deprecated_options(self):
-        if self.delete_conflicting or self.ignore_conflicts_at_my_risk:
-            log.warn(
-                "Note: The -D, --delete-conflicting and"
-                " --ignore-conflicts-at-my-risk no longer have any purpose"
-                " and should not be used."
-            )
-
-    def check_site_dir(self):
-        """Verify that self.install_dir is .pth-capable dir, if needed"""
-
-        instdir = normalize_path(self.install_dir)
-        pth_file = os.path.join(instdir,'easy-install.pth')
-
-        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
-        is_site_dir = instdir in self.all_site_dirs
-
-        if not is_site_dir:
-            # No?  Then directly test whether it does .pth file processing
-            is_site_dir = self.check_pth_processing()
-        else:
-            # make sure we can write to target dir
-            testfile = self.pseudo_tempname()+'.write-test'
-            test_exists = os.path.exists(testfile)
-            try:
-                if test_exists: os.unlink(testfile)
-                open(testfile,'w').close()
-                os.unlink(testfile)
-            except (OSError,IOError):
-                self.cant_write_to_target()
-
-        if not is_site_dir and not self.multi_version:
-            # Can't install non-multi to non-site dir
-            raise DistutilsError(self.no_default_version_msg())
-
-        if is_site_dir:
-            if self.pth_file is None:
-                self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
-        else:
-            self.pth_file = None
-
-        PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
-        if instdir not in map(normalize_path, filter(None,PYTHONPATH)):
-            # only PYTHONPATH dirs need a site.py, so pretend it's there
-            self.sitepy_installed = True
-        elif self.multi_version and not os.path.exists(pth_file):
-            self.sitepy_installed = True    # don't need site.py in this case
-            self.pth_file = None            # and don't create a .pth file
-        self.install_dir = instdir
-
-    def cant_write_to_target(self):
-        msg = """can't create or remove files in install directory
-
-The following error occurred while trying to add or remove files in the
-installation directory:
-
-    %s
-
-The installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
-    %s
-"""     % (sys.exc_info()[1], self.install_dir,)
-
-        if not os.path.exists(self.install_dir):
-            msg += """
-This directory does not currently exist.  Please create it and try again, or
-choose a different installation directory (using the -d or --install-dir
-option).
-"""
-        else:
-            msg += """
-Perhaps your account does not have write access to this directory?  If the
-installation directory is a system-owned directory, you may need to sign in
-as the administrator or "root" account.  If you do not have administrative
-access to this machine, you may wish to choose a different installation
-directory, preferably one that is listed in your PYTHONPATH environment
-variable.
-
-For information on other options, you may wish to consult the
-documentation at:
-
-  http://packages.python.org/distribute/easy_install.html
-
-Please make the appropriate changes for your system and try again.
-"""
-        raise DistutilsError(msg)
-
-
-
-
-    def check_pth_processing(self):
-        """Empirically verify whether .pth files are supported in inst. dir"""
-        instdir = self.install_dir
-        log.info("Checking .pth file support in %s", instdir)
-        pth_file = self.pseudo_tempname()+".pth"
-        ok_file = pth_file+'.ok'
-        ok_exists = os.path.exists(ok_file)
-        try:
-            if ok_exists: os.unlink(ok_file)
-            dirname = os.path.dirname(ok_file)
-            if not os.path.exists(dirname):
-                os.makedirs(dirname)
-            f = open(pth_file,'w')
-        except (OSError,IOError):
-            self.cant_write_to_target()
-        else:
-            try:
-                f.write("import os; f = open(%r, 'w'); f.write('OK'); f.close()\n" % (ok_file,))
-                f.close(); f=None
-                executable = sys.executable
-                if os.name=='nt':
-                    dirname,basename = os.path.split(executable)
-                    alt = os.path.join(dirname,'pythonw.exe')
-                    if basename.lower()=='python.exe' and os.path.exists(alt):
-                        # use pythonw.exe to avoid opening a console window
-                        executable = alt
-
-                from distutils.spawn import spawn
-                spawn([executable,'-E','-c','pass'],0)
-
-                if os.path.exists(ok_file):
-                    log.info(
-                        "TEST PASSED: %s appears to support .pth files",
-                        instdir
-                    )
-                    return True
-            finally:
-                if f: f.close()
-                if os.path.exists(ok_file): os.unlink(ok_file)
-                if os.path.exists(pth_file): os.unlink(pth_file)
-        if not self.multi_version:
-            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
-        return False
-
-    def install_egg_scripts(self, dist):
-        """Write all the scripts for `dist`, unless scripts are excluded"""
-        if not self.exclude_scripts and dist.metadata_isdir('scripts'):
-            for script_name in dist.metadata_listdir('scripts'):
-                self.install_script(
-                    dist, script_name,
-                    dist.get_metadata('scripts/'+script_name)
-                )
-        self.install_wrapper_scripts(dist)
-
-    def add_output(self, path):
-        if os.path.isdir(path):
-            for base, dirs, files in os.walk(path):
-                for filename in files:
-                    self.outputs.append(os.path.join(base,filename))
-        else:
-            self.outputs.append(path)
-
-    def not_editable(self, spec):
-        if self.editable:
-            raise DistutilsArgError(
-                "Invalid argument %r: you can't use filenames or URLs "
-                "with --editable (except via the --find-links option)."
-                % (spec,)
-            )
-
-    def check_editable(self,spec):
-        if not self.editable:
-            return
-
-        if os.path.exists(os.path.join(self.build_directory, spec.key)):
-            raise DistutilsArgError(
-                "%r already exists in %s; can't do a checkout there" %
-                (spec.key, self.build_directory)
-            )
-
-
-
-
-
-
-    def easy_install(self, spec, deps=False):
-        tmpdir = tempfile.mkdtemp(prefix="easy_install-")
-        download = None
-        if not self.editable: self.install_site_py()
-
-        try:
-            if not isinstance(spec,Requirement):
-                if URL_SCHEME(spec):
-                    # It's a url, download it to tmpdir and process
-                    self.not_editable(spec)
-                    download = self.package_index.download(spec, tmpdir)
-                    return self.install_item(None, download, tmpdir, deps, True)
-
-                elif os.path.exists(spec):
-                    # Existing file or directory, just process it directly
-                    self.not_editable(spec)
-                    return self.install_item(None, spec, tmpdir, deps, True)
-                else:
-                    spec = parse_requirement_arg(spec)
-
-            self.check_editable(spec)
-            dist = self.package_index.fetch_distribution(
-                spec, tmpdir, self.upgrade, self.editable, not self.always_copy,
-                self.local_index
-            )
-
-            if dist is None:
-                msg = "Could not find suitable distribution for %r" % spec
-                if self.always_copy:
-                    msg+=" (--always-copy skips system and development eggs)"
-                raise DistutilsError(msg)
-            elif dist.precedence==DEVELOP_DIST:
-                # .egg-info dists don't need installing, just process deps
-                self.process_distribution(spec, dist, deps, "Using")
-                return dist
-            else:
-                return self.install_item(spec, dist.location, tmpdir, deps)
-
-        finally:
-            if os.path.exists(tmpdir):
-                rmtree(tmpdir)
-
-    def install_item(self, spec, download, tmpdir, deps, install_needed=False):
-
-        # Installation is also needed if file in tmpdir or is not an egg
-        install_needed = install_needed or self.always_copy
-        install_needed = install_needed or os.path.dirname(download) == tmpdir
-        install_needed = install_needed or not download.endswith('.egg')
-        install_needed = install_needed or (
-            self.always_copy_from is not None and
-            os.path.dirname(normalize_path(download)) ==
-            normalize_path(self.always_copy_from)
-        )
-
-        if spec and not install_needed:
-            # at this point, we know it's a local .egg, we just don't know if
-            # it's already installed.
-            for dist in self.local_index[spec.project_name]:
-                if dist.location==download:
-                    break
-            else:
-                install_needed = True   # it's not in the local index
-
-        log.info("Processing %s", os.path.basename(download))
-
-        if install_needed:
-            dists = self.install_eggs(spec, download, tmpdir)
-            for dist in dists:
-                self.process_distribution(spec, dist, deps)
-        else:
-            dists = [self.check_conflicts(self.egg_distribution(download))]
-            self.process_distribution(spec, dists[0], deps, "Using")
-
-        if spec is not None:
-            for dist in dists:
-                if dist in spec:
-                    return dist
-
-
-
-    def select_scheme(self, name):
-        """Sets the install directories by applying the install schemes."""
-        # it's the caller's problem if they supply a bad name!
-        scheme = INSTALL_SCHEMES[name]
-        for key in SCHEME_KEYS:
-            attrname = 'install_' + key
-            if getattr(self, attrname) is None:
-                setattr(self, attrname, scheme[key])
-
-
-
-
-    def process_distribution(self, requirement, dist, deps=True, *info):
-        self.update_pth(dist)
-        self.package_index.add(dist)
-        self.local_index.add(dist)
-        if not self.editable:
-            self.install_egg_scripts(dist)
-        self.installed_projects[dist.key] = dist
-        log.info(self.installation_report(requirement, dist, *info))
-        if (dist.has_metadata('dependency_links.txt') and
-            not self.no_find_links):
-            self.package_index.add_find_links(
-                dist.get_metadata_lines('dependency_links.txt')
-            )
-        if not deps and not self.always_copy:
-            return
-        elif requirement is not None and dist.key != requirement.key:
-            log.warn("Skipping dependencies for %s", dist)
-            return  # XXX this is not the distribution we were looking for
-        elif requirement is None or dist not in requirement:
-            # if we wound up with a different version, resolve what we've got
-            distreq = dist.as_requirement()
-            requirement = requirement or distreq
-            requirement = Requirement(
-                distreq.project_name, distreq.specs, requirement.extras
-            )
-        log.info("Processing dependencies for %s", requirement)
-        try:
-            distros = WorkingSet([]).resolve(
-                [requirement], self.local_index, self.easy_install
-            )
-        except DistributionNotFound, e:
-            raise DistutilsError(
-                "Could not find required distribution %s" % e.args
-            )
-        except VersionConflict, e:
-            raise DistutilsError(
-                "Installed distribution %s conflicts with requirement %s"
-                % e.args
-            )
-        if self.always_copy or self.always_copy_from:
-            # Force all the relevant distros to be copied or activated
-            for dist in distros:
-                if dist.key not in self.installed_projects:
-                    self.easy_install(dist.as_requirement())
-        log.info("Finished processing dependencies for %s", requirement)
-
-    def should_unzip(self, dist):
-        if self.zip_ok is not None:
-            return not self.zip_ok
-        if dist.has_metadata('not-zip-safe'):
-            return True
-        if not dist.has_metadata('zip-safe'):
-            return True
-        return True
-
-    def maybe_move(self, spec, dist_filename, setup_base):
-        dst = os.path.join(self.build_directory, spec.key)
-        if os.path.exists(dst):
-            log.warn(
-               "%r already exists in %s; build directory %s will not be kept",
-               spec.key, self.build_directory, setup_base
-            )
-            return setup_base
-        if os.path.isdir(dist_filename):
-            setup_base = dist_filename
-        else:
-            if os.path.dirname(dist_filename)==setup_base:
-                os.unlink(dist_filename)   # get it out of the tmp dir
-            contents = os.listdir(setup_base)
-            if len(contents)==1:
-                dist_filename = os.path.join(setup_base,contents[0])
-                if os.path.isdir(dist_filename):
-                    # if the only thing there is a directory, move it instead
-                    setup_base = dist_filename
-        ensure_directory(dst); shutil.move(setup_base, dst)
-        return dst
-
-    def install_wrapper_scripts(self, dist):
-        if not self.exclude_scripts:
-            for args in get_script_args(dist):
-                self.write_script(*args)
-
-
-
-    def install_script(self, dist, script_name, script_text, dev_path=None):
-        """Generate a legacy script wrapper and install it"""
-        spec = str(dist.as_requirement())
-        is_script = is_python_script(script_text, script_name)
-
-        def get_template(filename):
-            """
-            There are a couple of template scripts in the package. This
-            function loads one of them and prepares it for use.
-
-            These templates use triple-quotes to escape variable
-            substitutions so the scripts get the 2to3 treatment when built
-            on Python 3. The templates cannot use triple-quotes naturally.
-            """
-            raw_bytes = resource_string('setuptools', template_name)
-            template_str = raw_bytes.decode('utf-8')
-            clean_template = template_str.replace('"""', '')
-            return clean_template
-
-        if is_script:
-            template_name = 'script template.py'
-            if dev_path:
-                template_name = template_name.replace('.py', ' (dev).py')
-            script_text = (get_script_header(script_text) +
-                get_template(template_name) % locals())
-        self.write_script(script_name, _to_ascii(script_text), 'b')
-
-    def write_script(self, script_name, contents, mode="t", blockers=()):
-        """Write an executable file to the scripts directory"""
-        self.delete_blockers(   # clean up old .py/.pyw w/o a script
-            [os.path.join(self.script_dir,x) for x in blockers])
-        log.info("Installing %s script to %s", script_name, self.script_dir)
-        target = os.path.join(self.script_dir, script_name)
-        self.add_output(target)
-
-        mask = current_umask()
-        if not self.dry_run:
-            ensure_directory(target)
-            f = open(target,"w"+mode)
-            f.write(contents)
-            f.close()
-            chmod(target, 0777-mask)
-
-
-
-
-    def install_eggs(self, spec, dist_filename, tmpdir):
-        # .egg dirs or files are already built, so just return them
-        if dist_filename.lower().endswith('.egg'):
-            return [self.install_egg(dist_filename, tmpdir)]
-        elif dist_filename.lower().endswith('.exe'):
-            return [self.install_exe(dist_filename, tmpdir)]
-
-        # Anything else, try to extract and build
-        setup_base = tmpdir
-        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
-            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
-        elif os.path.isdir(dist_filename):
-            setup_base = os.path.abspath(dist_filename)
-
-        if (setup_base.startswith(tmpdir)   # something we downloaded
-            and self.build_directory and spec is not None
-        ):
-            setup_base = self.maybe_move(spec, dist_filename, setup_base)
-
-        # Find the setup.py file
-        setup_script = os.path.join(setup_base, 'setup.py')
-
-        if not os.path.exists(setup_script):
-            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
-            if not setups:
-                raise DistutilsError(
-                    "Couldn't find a setup script in %s" % os.path.abspath(dist_filename)
-                )
-            if len(setups)>1:
-                raise DistutilsError(
-                    "Multiple setup scripts in %s" % os.path.abspath(dist_filename)
-                )
-            setup_script = setups[0]
-
-        # Now run it, and return the result
-        if self.editable:
-            log.info(self.report_editable(spec, setup_script))
-            return []
-        else:
-            return self.build_and_install(setup_script, setup_base)
-
-    def egg_distribution(self, egg_path):
-        if os.path.isdir(egg_path):
-            metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
-        else:
-            metadata = EggMetadata(zipimport.zipimporter(egg_path))
-        return Distribution.from_filename(egg_path,metadata=metadata)
-
-    def install_egg(self, egg_path, tmpdir):
-        destination = os.path.join(self.install_dir,os.path.basename(egg_path))
-        destination = os.path.abspath(destination)
-        if not self.dry_run:
-            ensure_directory(destination)
-
-        dist = self.egg_distribution(egg_path)
-        self.check_conflicts(dist)
-        if not samefile(egg_path, destination):
-            if os.path.isdir(destination) and not os.path.islink(destination):
-                dir_util.remove_tree(destination, dry_run=self.dry_run)
-            elif os.path.exists(destination):
-                self.execute(os.unlink,(destination,),"Removing "+destination)
-            uncache_zipdir(destination)
-            if os.path.isdir(egg_path):
-                if egg_path.startswith(tmpdir):
-                    f,m = shutil.move, "Moving"
-                else:
-                    f,m = shutil.copytree, "Copying"
-            elif self.should_unzip(dist):
-                self.mkpath(destination)
-                f,m = self.unpack_and_compile, "Extracting"
-            elif egg_path.startswith(tmpdir):
-                f,m = shutil.move, "Moving"
-            else:
-                f,m = shutil.copy2, "Copying"
-
-            self.execute(f, (egg_path, destination),
-                (m+" %s to %s") %
-                (os.path.basename(egg_path),os.path.dirname(destination)))
-
-        self.add_output(destination)
-        return self.egg_distribution(destination)
-
-    def install_exe(self, dist_filename, tmpdir):
-        # See if it's valid, get data
-        cfg = extract_wininst_cfg(dist_filename)
-        if cfg is None:
-            raise DistutilsError(
-                "%s is not a valid distutils Windows .exe" % dist_filename
-            )
-        # Create a dummy distribution object until we build the real distro
-        dist = Distribution(None,
-            project_name=cfg.get('metadata','name'),
-            version=cfg.get('metadata','version'), platform=get_platform()
-        )
-
-        # Convert the .exe to an unpacked egg
-        egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
-        egg_tmp  = egg_path+'.tmp'
-        egg_info = os.path.join(egg_tmp, 'EGG-INFO')
-        pkg_inf = os.path.join(egg_info, 'PKG-INFO')
-        ensure_directory(pkg_inf)   # make sure EGG-INFO dir exists
-        dist._provider = PathMetadata(egg_tmp, egg_info)    # XXX
-        self.exe_to_egg(dist_filename, egg_tmp)
-
-        # Write EGG-INFO/PKG-INFO
-        if not os.path.exists(pkg_inf):
-            f = open(pkg_inf,'w')
-            f.write('Metadata-Version: 1.0\n')
-            for k,v in cfg.items('metadata'):
-                if k<>'target_version':
-                    f.write('%s: %s\n' % (k.replace('_','-').title(), v))
-            f.close()
-        script_dir = os.path.join(egg_info,'scripts')
-        self.delete_blockers(   # delete entry-point scripts to avoid duping
-            [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
-        )
-        # Build .egg file from tmpdir
-        bdist_egg.make_zipfile(
-            egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
-        )
-        # install the .egg
-        return self.install_egg(egg_path, tmpdir)
-
-    def exe_to_egg(self, dist_filename, egg_tmp):
-        """Extract a bdist_wininst to the directories an egg would use"""
-        # Check for .pth file and set up prefix translations
-        prefixes = get_exe_prefixes(dist_filename)
-        to_compile = []
-        native_libs = []
-        top_level = {}
-        def process(src,dst):
-            s = src.lower()
-            for old,new in prefixes:
-                if s.startswith(old):
-                    src = new+src[len(old):]
-                    parts = src.split('/')
-                    dst = os.path.join(egg_tmp, *parts)
-                    dl = dst.lower()
-                    if dl.endswith('.pyd') or dl.endswith('.dll'):
-                        parts[-1] = bdist_egg.strip_module(parts[-1])
-                        top_level[os.path.splitext(parts[0])[0]] = 1
-                        native_libs.append(src)
-                    elif dl.endswith('.py') and old!='SCRIPTS/':
-                        top_level[os.path.splitext(parts[0])[0]] = 1
-                        to_compile.append(dst)
-                    return dst
-            if not src.endswith('.pth'):
-                log.warn("WARNING: can't process %s", src)
-            return None
-        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
-        unpack_archive(dist_filename, egg_tmp, process)
-        stubs = []
-        for res in native_libs:
-            if res.lower().endswith('.pyd'):    # create stubs for .pyd's
-                parts = res.split('/')
-                resource = parts[-1]
-                parts[-1] = bdist_egg.strip_module(parts[-1])+'.py'
-                pyfile = os.path.join(egg_tmp, *parts)
-                to_compile.append(pyfile); stubs.append(pyfile)
-                bdist_egg.write_stub(resource, pyfile)
-        self.byte_compile(to_compile)   # compile .py's
-        bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
-            bdist_egg.analyze_egg(egg_tmp, stubs))  # write zip-safety flag
-
-        for name in 'top_level','native_libs':
-            if locals()[name]:
-                txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
-                if not os.path.exists(txt):
-                    f = open(txt,'w')
-                    f.write('\n'.join(locals()[name])+'\n')
-                    f.close()
-
-    def check_conflicts(self, dist):
-        """Verify that there are no conflicting "old-style" packages"""
-
-        return dist     # XXX temporarily disable until new strategy is stable
-        from imp import find_module, get_suffixes
-        from glob import glob
-
-        blockers = []
-        names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr
-
-        exts = {'.pyc':1, '.pyo':1}     # get_suffixes() might leave one out
-        for ext,mode,typ in get_suffixes():
-            exts[ext] = 1
-
-        for path,files in expand_paths([self.install_dir]+self.all_site_dirs):
-            for filename in files:
-                base,ext = os.path.splitext(filename)
-                if base in names:
-                    if not ext:
-                        # no extension, check for package
-                        try:
-                            f, filename, descr = find_module(base, [path])
-                        except ImportError:
-                            continue
-                        else:
-                            if f: f.close()
-                            if filename not in blockers:
-                                blockers.append(filename)
-                    elif ext in exts and base!='site':  # XXX ugh
-                        blockers.append(os.path.join(path,filename))
-        if blockers:
-            self.found_conflicts(dist, blockers)
-
-        return dist
-
-    def found_conflicts(self, dist, blockers):
-        if self.delete_conflicting:
-            log.warn("Attempting to delete conflicting packages:")
-            return self.delete_blockers(blockers)
-
-        msg = """\
--------------------------------------------------------------------------
-CONFLICT WARNING:
-
-The following modules or packages have the same names as modules or
-packages being installed, and will be *before* the installed packages in
-Python's search path.  You MUST remove all of the relevant files and
-directories before you will be able to use the package(s) you are
-installing:
-
-   %s
-
-""" % '\n   '.join(blockers)
-
-        if self.ignore_conflicts_at_my_risk:
-            msg += """\
-(Note: you can run EasyInstall on '%s' with the
---delete-conflicting option to attempt deletion of the above files
-and/or directories.)
-""" % dist.project_name
-        else:
-            msg += """\
-Note: you can attempt this installation again with EasyInstall, and use
-either the --delete-conflicting (-D) option or the
---ignore-conflicts-at-my-risk option, to either delete the above files
-and directories, or to ignore the conflicts, respectively.  Note that if
-you ignore the conflicts, the installed package(s) may not work.
-"""
-        msg += """\
--------------------------------------------------------------------------
-"""
-        sys.stderr.write(msg)
-        sys.stderr.flush()
-        if not self.ignore_conflicts_at_my_risk:
-            raise DistutilsError("Installation aborted due to conflicts")
-
-    def installation_report(self, req, dist, what="Installed"):
-        """Helpful installation message for display to package users"""
-        msg = "\n%(what)s %(eggloc)s%(extras)s"
-        if self.multi_version and not self.no_report:
-            msg += """
-
-Because this distribution was installed --multi-version, before you can
-import modules from this package in an application, you will need to
-'import pkg_resources' and then use a 'require()' call similar to one of
-these examples, in order to select the desired version:
-
-    pkg_resources.require("%(name)s")  # latest installed version
-    pkg_resources.require("%(name)s==%(version)s")  # this exact version
-    pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
-"""
-            if self.install_dir not in map(normalize_path,sys.path):
-                msg += """
-
-Note also that the installation directory must be on sys.path at runtime for
-this to work.  (e.g. by being the application's script directory, by being on
-PYTHONPATH, or by being added to sys.path by your code.)
-"""
-        eggloc = dist.location
-        name = dist.project_name
-        version = dist.version
-        extras = '' # TODO: self.report_extras(req, dist)
-        return msg % locals()
-
-    def report_editable(self, spec, setup_script):
-        dirname = os.path.dirname(setup_script)
-        python = sys.executable
-        return """\nExtracted editable version of %(spec)s to %(dirname)s
-
-If it uses setuptools in its setup script, you can activate it in
-"development" mode by going to that directory and running::
-
-    %(python)s setup.py develop
-
-See the setuptools documentation for the "develop" command for more info.
-""" % locals()
-
-    def run_setup(self, setup_script, setup_base, args):
-        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
-        sys.modules.setdefault('distutils.command.egg_info', egg_info)
-
-        args = list(args)
-        if self.verbose>2:
-            v = 'v' * (self.verbose - 1)
-            args.insert(0,'-'+v)
-        elif self.verbose<2:
-            args.insert(0,'-q')
-        if self.dry_run:
-            args.insert(0,'-n')
-        log.info(
-            "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
-        )
-        try:
-            run_setup(setup_script, args)
-        except SystemExit, v:
-            raise DistutilsError("Setup script exited with %s" % (v.args[0],))
-
-    def build_and_install(self, setup_script, setup_base):
-        args = ['bdist_egg', '--dist-dir']
-
-        dist_dir = tempfile.mkdtemp(
-            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
-        )
-        try:
-            self._set_fetcher_options(os.path.dirname(setup_script))
-            args.append(dist_dir)
-
-            self.run_setup(setup_script, setup_base, args)
-            all_eggs = Environment([dist_dir])
-            eggs = []
-            for key in all_eggs:
-                for dist in all_eggs[key]:
-                    eggs.append(self.install_egg(dist.location, setup_base))
-            if not eggs and not self.dry_run:
-                log.warn("No eggs found in %s (setup script problem?)",
-                    dist_dir)
-            return eggs
-        finally:
-            rmtree(dist_dir)
-            log.set_verbosity(self.verbose) # restore our log verbosity
-
-    def _set_fetcher_options(self, base):
-        """
-        When easy_install is about to run bdist_egg on a source dist, that
-        source dist might have 'setup_requires' directives, requiring
-        additional fetching. Ensure the fetcher options given to easy_install
-        are available to that command as well.
-        """
-        # find the fetch options from easy_install and write them out
-        #  to the setup.cfg file.
-        ei_opts = self.distribution.get_option_dict('easy_install').copy()
-        fetch_directives = (
-            'find_links', 'site_dirs', 'index_url', 'optimize',
-            'allow_hosts',
-        )
-        fetch_options = {}
-        for key, val in ei_opts.iteritems():
-            if key not in fetch_directives: continue
-            fetch_options[key.replace('_', '-')] = val[1]
-        # create a settings dictionary suitable for `edit_config`
-        settings = dict(easy_install=fetch_options)
-        cfg_filename = os.path.join(base, 'setup.cfg')
-        setopt.edit_config(cfg_filename, settings)
-
-
-    def update_pth(self,dist):
-        if self.pth_file is None:
-            return
-
-        for d in self.pth_file[dist.key]:    # drop old entries
-            if self.multi_version or d.location != dist.location:
-                log.info("Removing %s from easy-install.pth file", d)
-                self.pth_file.remove(d)
-                if d.location in self.shadow_path:
-                    self.shadow_path.remove(d.location)
-
-        if not self.multi_version:
-            if dist.location in self.pth_file.paths:
-                log.info(
-                    "%s is already the active version in easy-install.pth",
-                    dist
-                )
-            else:
-                log.info("Adding %s to easy-install.pth file", dist)
-                self.pth_file.add(dist) # add new entry
-                if dist.location not in self.shadow_path:
-                    self.shadow_path.append(dist.location)
-
-        if not self.dry_run:
-
-            self.pth_file.save()
-            if dist.key=='distribute':
-                # Ensure that setuptools itself never becomes unavailable!
-                # XXX should this check for latest version?
-                filename = os.path.join(self.install_dir,'setuptools.pth')
-                if os.path.islink(filename): os.unlink(filename)
-                f = open(filename, 'wt')
-                f.write(self.pth_file.make_relative(dist.location)+'\n')
-                f.close()
-
-    def unpack_progress(self, src, dst):
-        # Progress filter for unpacking
-        log.debug("Unpacking %s to %s", src, dst)
-        return dst     # only unpack-and-compile skips files for dry run
-
-    def unpack_and_compile(self, egg_path, destination):
-        to_compile = []; to_chmod = []
-
-        def pf(src,dst):
-            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
-                to_compile.append(dst)
-                to_chmod.append(dst)
-            elif dst.endswith('.dll') or dst.endswith('.so'):
-                to_chmod.append(dst)
-            self.unpack_progress(src,dst)
-            return not self.dry_run and dst or None
-
-        unpack_archive(egg_path, destination, pf)
-        self.byte_compile(to_compile)
-        if not self.dry_run:
-            for f in to_chmod:
-                mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755
-                chmod(f, mode)
-
-    def byte_compile(self, to_compile):
-        if _dont_write_bytecode:
-            self.warn('byte-compiling is disabled, skipping.')
-            return
-
-        from distutils.util import byte_compile
-        try:
-            # try to make the byte compile messages quieter
-            log.set_verbosity(self.verbose - 1)
-
-            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
-            if self.optimize:
-                byte_compile(
-                    to_compile, optimize=self.optimize, force=1,
-                    dry_run=self.dry_run
-                )
-        finally:
-            log.set_verbosity(self.verbose)     # restore original verbosity
-
-
-
-
-
-
-
-
-    def no_default_version_msg(self):
-        return """bad install directory or PYTHONPATH
-
-You are attempting to install a package to a directory that is not
-on PYTHONPATH and which Python does not read ".pth" files from.  The
-installation directory you specified (via --install-dir, --prefix, or
-the distutils default setting) was:
-
-    %s
-
-and your PYTHONPATH environment variable currently contains:
-
-    %r
-
-Here are some of your options for correcting the problem:
-
-* You can choose a different installation directory, i.e., one that is
-  on PYTHONPATH or supports .pth files
-
-* You can add the installation directory to the PYTHONPATH environment
-  variable.  (It must then also be on PYTHONPATH whenever you run
-  Python and want to use the package(s) you are installing.)
-
-* You can set up the installation directory to support ".pth" files by
-  using one of the approaches described here:
-
-  http://packages.python.org/distribute/easy_install.html#custom-installation-locations
-
-Please make the appropriate changes for your system and try again.""" % (
-        self.install_dir, os.environ.get('PYTHONPATH','')
-    )
-
-
-
-
-
-
-
-
-
-
-    def install_site_py(self):
-        """Make sure there's a site.py in the target dir, if needed"""
-
-        if self.sitepy_installed:
-            return  # already did it, or don't need to
-
-        sitepy = os.path.join(self.install_dir, "site.py")
-        source = resource_string(Requirement.parse("distribute"), "site.py")
-        current = ""
-
-        if os.path.exists(sitepy):
-            log.debug("Checking existing site.py in %s", self.install_dir)
-            f = open(sitepy,'rb')
-            current = f.read()
-            # we want str, not bytes
-            if sys.version_info >= (3,):
-                current = current.decode()
-
-            f.close()
-            if not current.startswith('def __boot():'):
-                raise DistutilsError(
-                    "%s is not a setuptools-generated site.py; please"
-                    " remove it." % sitepy
-                )
-
-        if current != source:
-            log.info("Creating %s", sitepy)
-            if not self.dry_run:
-                ensure_directory(sitepy)
-                f = open(sitepy,'wb')
-                f.write(source)
-                f.close()
-            self.byte_compile([sitepy])
-
-        self.sitepy_installed = True
-
-
-
-
-    def create_home_path(self):
-        """Create directories under ~."""
-        if not self.user:
-            return
-        home = convert_path(os.path.expanduser("~"))
-        for name, path in self.config_vars.iteritems():
-            if path.startswith(home) and not os.path.isdir(path):
-                self.debug_print("os.makedirs('%s', 0700)" % path)
-                os.makedirs(path, 0700)
-
-
-
-
-
-
-
-    INSTALL_SCHEMES = dict(
-        posix = dict(
-            install_dir = '$base/lib/python$py_version_short/site-packages',
-            script_dir  = '$base/bin',
-        ),
-    )
-
-    DEFAULT_SCHEME = dict(
-        install_dir = '$base/Lib/site-packages',
-        script_dir  = '$base/Scripts',
-    )
-
-    def _expand(self, *attrs):
-        config_vars = self.get_finalized_command('install').config_vars
-
-        if self.prefix:
-            # Set default install_dir/scripts from --prefix
-            config_vars = config_vars.copy()
-            config_vars['base'] = self.prefix
-            scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
-            for attr,val in scheme.items():
-                if getattr(self,attr,None) is None:
-                    setattr(self,attr,val)
-
-        from distutils.util import subst_vars
-        for attr in attrs:
-            val = getattr(self, attr)
-            if val is not None:
-                val = subst_vars(val, config_vars)
-                if os.name == 'posix':
-                    val = os.path.expanduser(val)
-                setattr(self, attr, val)
-
-
-
-
-
-
-
-
-
-def get_site_dirs():
-    # return a list of 'site' dirs
-    sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep))
-    prefixes = [sys.prefix]
-    if sys.exec_prefix != sys.prefix:
-        prefixes.append(sys.exec_prefix)
-    for prefix in prefixes:
-        if prefix:
-            if sys.platform in ('os2emx', 'riscos'):
-                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
-            elif os.sep == '/':
-                sitedirs.extend([os.path.join(prefix,
-                                         "lib",
-                                         "python" + sys.version[:3],
-                                         "site-packages"),
-                            os.path.join(prefix, "lib", "site-python")])
-            else:
-                sitedirs.extend(
-                    [prefix, os.path.join(prefix, "lib", "site-packages")]
-                )
-            if sys.platform == 'darwin':
-                # for framework builds *only* we add the standard Apple
-                # locations. Currently only per-user, but /Library and
-                # /Network/Library could be added too
-                if 'Python.framework' in prefix:
-                    home = os.environ.get('HOME')
-                    if home:
-                        sitedirs.append(
-                            os.path.join(home,
-                                         'Library',
-                                         'Python',
-                                         sys.version[:3],
-                                         'site-packages'))
-    for plat_specific in (0,1):
-        site_lib = get_python_lib(plat_specific)
-        if site_lib not in sitedirs: sitedirs.append(site_lib)
-
-    if HAS_USER_SITE:
-        sitedirs.append(site.USER_SITE)
-
-    sitedirs = map(normalize_path, sitedirs)
-
-    return sitedirs
-
-
-def expand_paths(inputs):
-    """Yield sys.path directories that might contain "old-style" packages"""
-
-    seen = {}
-
-    for dirname in inputs:
-        dirname = normalize_path(dirname)
-        if dirname in seen:
-            continue
-
-        seen[dirname] = 1
-        if not os.path.isdir(dirname):
-            continue
-
-        files = os.listdir(dirname)
-        yield dirname, files
-
-        for name in files:
-            if not name.endswith('.pth'):
-                # We only care about the .pth files
-                continue
-            if name in ('easy-install.pth','setuptools.pth'):
-                # Ignore .pth files that we control
-                continue
-
-            # Read the .pth file
-            f = open(os.path.join(dirname,name))
-            lines = list(yield_lines(f))
-            f.close()
-
-            # Yield existing non-dupe, non-import directory lines from it
-            for line in lines:
-                if not line.startswith("import"):
-                    line = normalize_path(line.rstrip())
-                    if line not in seen:
-                        seen[line] = 1
-                        if not os.path.isdir(line):
-                            continue
-                        yield line, os.listdir(line)
-
-
-def extract_wininst_cfg(dist_filename):
-    """Extract configuration data from a bdist_wininst .exe
-
-    Returns a ConfigParser.RawConfigParser, or None
-    """
-    f = open(dist_filename,'rb')
-    try:
-        endrec = zipfile._EndRecData(f)
-        if endrec is None:
-            return None
-
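-        # The end-of-central-directory record gives the central directory's
-        # size and offset plus its own location in the file; the difference
-        # below is the number of bytes prepended ahead of the zip data
-        # (the wininst stub executable and its config block).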
-        prepended = (endrec[9] - endrec[5]) - endrec[6]
-        if prepended < 12:  # no wininst data here
-            return None
-        f.seek(prepended-12)
-
-        import struct, StringIO, ConfigParser
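-        # The last 12 bytes of the prepended data are a magic tag followed by
-        # the lengths of the config block and the installer bitmap; the
-        # config block itself sits immediately before these 12 bytes.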
-        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
-        if tag not in (0x1234567A, 0x1234567B):
-            return None     # not a valid tag
-
-        f.seek(prepended-(12+cfglen))
-        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
-        try:
-            part = f.read(cfglen)
-            # part is in bytes, but we need to read up to the first null
-            #  byte.
-            if sys.version_info >= (2,6):
-                null_byte = bytes([0])
-            else:
-                null_byte = chr(0)
-            config = part.split(null_byte, 1)[0]
-            # Now the config is in bytes, but on Python 3, it must be
-            #  unicode for the RawConfigParser, so decode it. Is this the
-            #  right encoding?
-            config = config.decode('ascii')
-            cfg.readfp(StringIO.StringIO(config))
-        except ConfigParser.Error:
-            return None
-        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
-            return None
-        return cfg
-
-    finally:
-        f.close()
-
-
-
-
-
-
-
-
-def get_exe_prefixes(exe_filename):
-    """Get exe->egg path translations for a given .exe file"""
-
-    prefixes = [
-        ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
-        ('PLATLIB/', ''),
-        ('SCRIPTS/', 'EGG-INFO/scripts/'),
-        ('DATA/LIB/site-packages', ''),
-    ]
-    z = zipfile.ZipFile(exe_filename)
-    try:
-        for info in z.infolist():
-            name = info.filename
-            parts = name.split('/')
-            if len(parts)==3 and parts[2]=='PKG-INFO':
-                if parts[1].endswith('.egg-info'):
-                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
-                    break
-            if len(parts)<>2 or not name.endswith('.pth'):
-                continue
-            if name.endswith('-nspkg.pth'):
-                continue
-            if parts[0].upper() in ('PURELIB','PLATLIB'):
-                contents = z.read(name)
-                if sys.version_info >= (3,):
-                    contents = contents.decode()
-                for pth in yield_lines(contents):
-                    pth = pth.strip().replace('\\','/')
-                    if not pth.startswith('import'):
-                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
-    finally:
-        z.close()
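-    # Lower-case the prefixes and reverse-sort them so that longer (more
-    # specific) prefixes are matched before shorter ones when translating
-    # paths out of the .exe.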
-    prefixes = [(x.lower(),y) for x, y in prefixes]
-    prefixes.sort(); prefixes.reverse()
-    return prefixes
-
-
-def parse_requirement_arg(spec):
-    try:
-        return Requirement.parse(spec)
-    except ValueError:
-        raise DistutilsError(
-            "Not a URL, existing file, or requirement spec: %r" % (spec,)
-        )
-
-class PthDistributions(Environment):
-    """A .pth file with Distribution paths in it"""
-
-    dirty = False
-
-    def __init__(self, filename, sitedirs=()):
-        self.filename = filename; self.sitedirs=map(normalize_path, sitedirs)
-        self.basedir = normalize_path(os.path.dirname(self.filename))
-        self._load(); Environment.__init__(self, [], None, None)
-        for path in yield_lines(self.paths):
-            map(self.add, find_distributions(path, True))
-
-    def _load(self):
-        self.paths = []
-        saw_import = False
-        seen = dict.fromkeys(self.sitedirs)
-        if os.path.isfile(self.filename):
-            f = open(self.filename,'rt')
-            for line in f:
-                if line.startswith('import'):
-                    saw_import = True
-                    continue
-                path = line.rstrip()
-                self.paths.append(path)
-                if not path.strip() or path.strip().startswith('#'):
-                    continue
-                # skip non-existent paths, in case somebody deleted a package
-                # manually, and duplicate paths as well
-                path = self.paths[-1] = normalize_path(
-                    os.path.join(self.basedir,path)
-                )
-                if not os.path.exists(path) or path in seen:
-                    self.paths.pop()    # skip it
-                    self.dirty = True   # we cleaned up, so we're dirty now :)
-                    continue
-                seen[path] = 1
-            f.close()
-
-        if self.paths and not saw_import:
-            self.dirty = True   # ensure anything we touch has import wrappers
-        while self.paths and not self.paths[-1].strip():
-            self.paths.pop()
-
-    def save(self):
-        """Write changed .pth file back to disk"""
-        if not self.dirty:
-            return
-
-        data = '\n'.join(map(self.make_relative,self.paths))
-        if data:
-            log.debug("Saving %s", self.filename)
-            data = (
-                "import sys; sys.__plen = len(sys.path)\n"
-                "%s\n"
-                "import sys; new=sys.path[sys.__plen:];"
-                " del sys.path[sys.__plen:];"
-                " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
-                " sys.__egginsert = p+len(new)\n"
-            ) % data
-
-            if os.path.islink(self.filename):
-                os.unlink(self.filename)
-            f = open(self.filename,'wt')
-            f.write(data); f.close()
-
-        elif os.path.exists(self.filename):
-            log.debug("Deleting empty %s", self.filename)
-            os.unlink(self.filename)
-
-        self.dirty = False
-
-    def add(self,dist):
-        """Add `dist` to the distribution map"""
-        if (dist.location not in self.paths and (
-                dist.location not in self.sitedirs or
-                dist.location == os.getcwd() #account for '.' being in PYTHONPATH
-                )):
-            self.paths.append(dist.location)
-            self.dirty = True
-        Environment.add(self,dist)
-
-    def remove(self,dist):
-        """Remove `dist` from the distribution map"""
-        while dist.location in self.paths:
-            self.paths.remove(dist.location); self.dirty = True
-        Environment.remove(self,dist)
-
-
-    def make_relative(self,path):
-        npath, last = os.path.split(normalize_path(path))
-        baselen = len(self.basedir)
-        parts = [last]
-        sep = os.altsep=='/' and '/' or os.sep
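-        # Walk upward from `path` looking for self.basedir; if it is never
-        # reached, the while loop ends without returning and the else clause
-        # hands back the original (absolute) path unchanged.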
-        while len(npath)>=baselen:
-            if npath==self.basedir:
-                parts.append(os.curdir)
-                parts.reverse()
-                return sep.join(parts)
-            npath, last = os.path.split(npath)
-            parts.append(last)
-        else:
-            return path
-
-def get_script_header(script_text, executable=sys_executable, wininst=False):
-    """Create a #! line, getting options (if any) from script_text"""
-    from distutils.command.build_scripts import first_line_re
-
-    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
-    if not isinstance(first_line_re.pattern, str):
-        first_line_re = re.compile(first_line_re.pattern.decode())
-
-    first = (script_text+'\n').splitlines()[0]
-    match = first_line_re.match(first)
-    options = ''
-    if match:
-        options = match.group(1) or ''
-        if options: options = ' '+options
-    if wininst:
-        executable = "python.exe"
-    else:
-        executable = nt_quote_arg(executable)
-    hdr = "#!%(executable)s%(options)s\n" % locals()
-    if not isascii(hdr):
-        # Non-ascii path to sys.executable, use -x to prevent warnings
-        if options:
-            if options.strip().startswith('-'):
-                options = ' -x'+options.strip()[1:]
-            # else: punt, we can't do it, let the warning happen anyway
-        else:
-            options = ' -x'
-    executable = fix_jython_executable(executable, options)
-    hdr = "#!%(executable)s%(options)s\n" % locals()
-    return hdr
-
-def auto_chmod(func, arg, exc):
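-    # rmtree() error handler: on Windows a read-only file can't be removed,
-    # so clear the read-only bit and retry; otherwise re-raise the original
-    # error with the failing function and argument appended for context.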
-    if func is os.remove and os.name=='nt':
-        chmod(arg, stat.S_IWRITE)
-        return func(arg)
-    exc = sys.exc_info()
-    raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg)))
-
-def uncache_zipdir(path):
-    """Ensure that the importer caches dont have stale info for `path`"""
-    from zipimport import _zip_directory_cache as zdc
-    _uncache(path, zdc)
-    _uncache(path, sys.path_importer_cache)
-
-def _uncache(path, cache):
-    if path in cache:
-        del cache[path]
-    else:
-        path = normalize_path(path)
-        for p in cache:
-            if normalize_path(p)==path:
-                del cache[p]
-                return
-
-def is_python(text, filename='<string>'):
-    "Is this string a valid Python script?"
-    try:
-        compile(text, filename, 'exec')
-    except (SyntaxError, TypeError):
-        return False
-    else:
-        return True
-
-def is_sh(executable):
-    """Determine if the specified executable is a .sh (contains a #! line)"""
-    try:
-        fp = open(executable)
-        magic = fp.read(2)
-        fp.close()
-    except (OSError,IOError): return executable
-    return magic == '#!'
-
-def nt_quote_arg(arg):
-    """Quote a command line argument according to Windows parsing rules"""
-
-    result = []
-    needquote = False
-    nb = 0
-
-    needquote = (" " in arg) or ("\t" in arg)
-    if needquote:
-        result.append('"')
-
-    for c in arg:
-        if c == '\\':
-            nb += 1
-        elif c == '"':
-            # double preceding backslashes, then add a \"
-            result.append('\\' * (nb*2) + '\\"')
-            nb = 0
-        else:
-            if nb:
-                result.append('\\' * nb)
-                nb = 0
-            result.append(c)
-
-    if nb:
-        result.append('\\' * nb)
-
-    if needquote:
-        result.append('\\' * nb)    # double the trailing backslashes
-        result.append('"')
-
-    return ''.join(result)
-
-
-
-
-
-
-
-
-
-def is_python_script(script_text, filename):
-    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
-    """
-    if filename.endswith('.py') or filename.endswith('.pyw'):
-        return True     # extension says it's Python
-    if is_python(script_text, filename):
-        return True     # it's syntactically valid Python
-    if script_text.startswith('#!'):
-        # It begins with a '#!' line, so check if 'python' is in it somewhere
-        return 'python' in script_text.splitlines()[0].lower()
-
-    return False    # Not any Python I can recognize
-
-try:
-    from os import chmod as _chmod
-except ImportError:
-    # Jython compatibility
-    def _chmod(*args): pass
-
-def chmod(path, mode):
-    log.debug("changing mode of %s to %o", path, mode)
-    try:
-        _chmod(path, mode)
-    except os.error, e:
-        log.debug("chmod failed: %s", e)
-
-def fix_jython_executable(executable, options):
-    if sys.platform.startswith('java') and is_sh(executable):
-        # Workaround for Jython is not needed on Linux systems.
-        import java
-        if java.lang.System.getProperty("os.name") == "Linux":
-            return executable
-
-        # Workaround Jython's sys.executable being a .sh (an invalid
-        # shebang line interpreter)
-        if options:
-            # Can't apply the workaround, leave it broken
-            log.warn("WARNING: Unable to adapt shebang line for Jython,"
-                             " the following script is NOT executable\n"
-                     "         see http://bugs.jython.org/issue1112 for"
-                             " more information.")
-        else:
-            return '/usr/bin/env %s' % executable
-    return executable
-
-
-def get_script_args(dist, executable=sys_executable, wininst=False):
-    """Yield write_script() argument tuples for a distribution's entrypoints"""
-    spec = str(dist.as_requirement())
-    header = get_script_header("", executable, wininst)
-    for group in 'console_scripts', 'gui_scripts':
-        for name, ep in dist.get_entry_map(group).items():
-            script_text = (
-                "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n"
-                "__requires__ = %(spec)r\n"
-                "import sys\n"
-                "from pkg_resources import load_entry_point\n"
-                "\n"
-                "if __name__ == '__main__':"
-                "\n"
-                "    sys.exit(\n"
-                "        load_entry_point(%(spec)r, %(group)r, %(name)r)()\n"
-                "    )\n"
-            ) % locals()
-            if sys.platform=='win32' or wininst:
-                # On Windows/wininst, add a .py extension and an .exe launcher
-                if group=='gui_scripts':
-                    ext, launcher = '-script.pyw', 'gui.exe'
-                    old = ['.pyw']
-                    new_header = re.sub('(?i)python.exe','pythonw.exe',header)
-                else:
-                    ext, launcher = '-script.py', 'cli.exe'
-                    old = ['.py','.pyc','.pyo']
-                    new_header = re.sub('(?i)pythonw.exe','python.exe',header)
-                if is_64bit():
-                    launcher = launcher.replace(".", "-64.")
-                else:
-                    launcher = launcher.replace(".", "-32.")
-                if os.path.exists(new_header[2:-1]) or sys.platform!='win32':
-                    hdr = new_header
-                else:
-                    hdr = header
-                yield (name+ext, hdr+script_text, 't', [name+x for x in old])
-                yield (
-                    name+'.exe', resource_string('setuptools', launcher),
-                    'b' # write in binary mode
-                )
-            else:
-                # On other platforms, we assume the right thing to do is to
-                # just write the stub with no extension.
-                yield (name, header+script_text)
-
-def rmtree(path, ignore_errors=False, onerror=auto_chmod):
-    """Recursively delete a directory tree.
-
-    This code is taken from the Python 2.4 version of 'shutil', because
-    the 2.3 version doesn't really work right.
-    """
-    if ignore_errors:
-        def onerror(*args):
-            pass
-    elif onerror is None:
-        def onerror(*args):
-            raise
-    names = []
-    try:
-        names = os.listdir(path)
-    except os.error, err:
-        onerror(os.listdir, path, sys.exc_info())
-    for name in names:
-        fullname = os.path.join(path, name)
-        try:
-            mode = os.lstat(fullname).st_mode
-        except os.error:
-            mode = 0
-        if stat.S_ISDIR(mode):
-            rmtree(fullname, ignore_errors, onerror)
-        else:
-            try:
-                os.remove(fullname)
-            except os.error, err:
-                onerror(os.remove, fullname, sys.exc_info())
-    try:
-        os.rmdir(path)
-    except os.error:
-        onerror(os.rmdir, path, sys.exc_info())
-
-def current_umask():
-    tmp = os.umask(022)
-    os.umask(tmp)
-    return tmp
-
-def bootstrap():
-    # This function is called when setuptools*.egg is run using /bin/sh
-    import setuptools; argv0 = os.path.dirname(setuptools.__path__[0])
-    sys.argv[0] = argv0; sys.argv.append(argv0); main()
-
-def main(argv=None, **kw):
-    from setuptools import setup
-    from setuptools.dist import Distribution
-    import distutils.core
-
-    USAGE = """\
-usage: %(script)s [options] requirement_or_url ...
-   or: %(script)s --help
-"""
-
-    def gen_usage (script_name):
-        script = os.path.basename(script_name)
-        return USAGE % vars()
-
-    def with_ei_usage(f):
-        old_gen_usage = distutils.core.gen_usage
-        try:
-            distutils.core.gen_usage = gen_usage
-            return f()
-        finally:
-            distutils.core.gen_usage = old_gen_usage
-
-    class DistributionWithoutHelpCommands(Distribution):
-        common_usage = ""
-
-        def _show_help(self,*args,**kw):
-            with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
-
-        def find_config_files(self):
-            files = Distribution.find_config_files(self)
-            if 'setup.cfg' in files:
-                files.remove('setup.cfg')
-            return files
-
-    if argv is None:
-        argv = sys.argv[1:]
-
-    with_ei_usage(lambda:
-        setup(
-            script_args = ['-q','easy_install', '-v']+argv,
-            script_name = sys.argv[0] or 'easy_install',
-            distclass=DistributionWithoutHelpCommands, **kw
-        )
-    )
-
-
-
-
diff --git a/vendor/distribute-0.6.34/setuptools/command/egg_info.py b/vendor/distribute-0.6.34/setuptools/command/egg_info.py
deleted file mode 100644
index 0c2ea0cca340f5d5bf066993e7d305c357e7d2c8..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/egg_info.py
+++ /dev/null
@@ -1,486 +0,0 @@
-"""setuptools.command.egg_info
-
-Create a distribution's .egg-info directory and contents"""
-
-# This module should be kept compatible with Python 2.3
-import os, re, sys
-from setuptools import Command
-from distutils.errors import *
-from distutils import log
-from setuptools.command.sdist import sdist
-from distutils.util import convert_path
-from distutils.filelist import FileList as _FileList
-from pkg_resources import parse_requirements, safe_name, parse_version, \
-    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
-from sdist import walk_revctrl
-
-class egg_info(Command):
-    description = "create a distribution's .egg-info directory"
-
-    user_options = [
-        ('egg-base=', 'e', "directory containing .egg-info directories"
-                           " (default: top of the source tree)"),
-        ('tag-svn-revision', 'r',
-            "Add subversion revision ID to version number"),
-        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
-        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
-        ('no-svn-revision', 'R',
-            "Don't add subversion revision ID [default]"),
-        ('no-date', 'D', "Don't include date stamp [default]"),
-    ]
-
-    boolean_options = ['tag-date', 'tag-svn-revision']
-    negative_opt = {'no-svn-revision': 'tag-svn-revision',
-                    'no-date': 'tag-date'}
-
-
-
-
-
-
-
-    def initialize_options(self):
-        self.egg_name = None
-        self.egg_version = None
-        self.egg_base = None
-        self.egg_info = None
-        self.tag_build = None
-        self.tag_svn_revision = 0
-        self.tag_date = 0
-        self.broken_egg_info = False
-        self.vtags = None
-
-    def save_version_info(self, filename):
-        from setopt import edit_config
-        edit_config(
-            filename,
-            {'egg_info':
-                {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()}
-            }
-        )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def finalize_options (self):
-        self.egg_name = safe_name(self.distribution.get_name())
-        self.vtags = self.tags()
-        self.egg_version = self.tagged_version()
-
-        try:
-            list(
-                parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
-            )
-        except ValueError:
-            raise DistutilsOptionError(
-                "Invalid distribution name or version syntax: %s-%s" %
-                (self.egg_name,self.egg_version)
-            )
-
-        if self.egg_base is None:
-            dirs = self.distribution.package_dir
-            self.egg_base = (dirs or {}).get('',os.curdir)
-
-        self.ensure_dirname('egg_base')
-        self.egg_info = to_filename(self.egg_name)+'.egg-info'
-        if self.egg_base != os.curdir:
-            self.egg_info = os.path.join(self.egg_base, self.egg_info)
-        if '-' in self.egg_name: self.check_broken_egg_info()
-
-        # Set package version for the benefit of dumber commands
-        # (e.g. sdist, bdist_wininst, etc.)
-        #
-        self.distribution.metadata.version = self.egg_version
-
-        # If we bootstrapped around the lack of a PKG-INFO, as might be the
-        # case in a fresh checkout, make sure that any special tags get added
-        # to the version info
-        #
-        pd = self.distribution._patched_dist
-        if pd is not None and pd.key==self.egg_name.lower():
-            pd._version = self.egg_version
-            pd._parsed_version = parse_version(self.egg_version)
-            self.distribution._patched_dist = None
-
-
-    def write_or_delete_file(self, what, filename, data, force=False):
-        """Write `data` to `filename` or delete if empty
-
-        If `data` is non-empty, this routine is the same as ``write_file()``.
-        If `data` is empty but not ``None``, this is the same as calling
-        ``delete_file(filename)``.  If `data` is ``None``, then this is a no-op
-        unless `filename` exists, in which case a warning is issued about the
-        orphaned file (if `force` is false), or deleted (if `force` is true).
-        """
-        if data:
-            self.write_file(what, filename, data)
-        elif os.path.exists(filename):
-            if data is None and not force:
-                log.warn(
-                    "%s not set in setup(), but %s exists", what, filename
-                )
-                return
-            else:
-                self.delete_file(filename)
-
-    def write_file(self, what, filename, data):
-        """Write `data` to `filename` (if not a dry run) after announcing it
-
-        `what` is used in a log message to identify what is being written
-        to the file.
-        """
-        log.info("writing %s to %s", what, filename)
-        if sys.version_info >= (3,):
-            data = data.encode("utf-8")
-        if not self.dry_run:
-            f = open(filename, 'wb')
-            f.write(data)
-            f.close()
-
-    def delete_file(self, filename):
-        """Delete `filename` (if not a dry run) after announcing it"""
-        log.info("deleting %s", filename)
-        if not self.dry_run:
-            os.unlink(filename)
-
-    def tagged_version(self):
-        version = self.distribution.get_version()
-        # egg_info may be called more than once for a distribution,
-        # in which case the version string already contains all tags.
-        if self.vtags and version.endswith(self.vtags):
-            return safe_version(version)
-        return safe_version(version + self.vtags)
-
-    def run(self):
-        self.mkpath(self.egg_info)
-        installer = self.distribution.fetch_build_egg
-        for ep in iter_entry_points('egg_info.writers'):
-            writer = ep.load(installer=installer)
-            writer(self, ep.name, os.path.join(self.egg_info,ep.name))
-
-        # Get rid of native_libs.txt if it was put there by older bdist_egg
-        nl = os.path.join(self.egg_info, "native_libs.txt")
-        if os.path.exists(nl):
-            self.delete_file(nl)
-
-        self.find_sources()
-
-    def tags(self):
-        version = ''
-        if self.tag_build:
-            version+=self.tag_build
-        if self.tag_svn_revision and (
-            os.path.exists('.svn') or os.path.exists('PKG-INFO')
-        ):  version += '-r%s' % self.get_svn_revision()
-        if self.tag_date:
-            import time; version += time.strftime("-%Y%m%d")
-        return version
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def get_svn_revision(self):
-        revision = 0
-        urlre = re.compile('url="([^"]+)"')
-        revre = re.compile('committed-rev="(\d+)"')
-
-        for base,dirs,files in os.walk(os.curdir):
-            if '.svn' not in dirs:
-                dirs[:] = []
-                continue    # no sense walking uncontrolled subdirs
-            dirs.remove('.svn')
-            f = open(os.path.join(base,'.svn','entries'))
-            data = f.read()
-            f.close()
-
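-            # Subversion 1.4-1.6 working copies use a plain-text 'entries'
-            # file whose first line is the format number (8, 9 or 10);
-            # 1.3 and earlier stored the entries as XML.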
-            if data.startswith('10') or data.startswith('9') or data.startswith('8'):
-                data = map(str.splitlines,data.split('\n\x0c\n'))
-                del data[0][0]  # get rid of the '8' or '9' or '10'
-                dirurl = data[0][3]
-                localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0])
-            elif data.startswith('<?xml'):
-                dirurl = urlre.search(data).group(1)    # get repository URL
-                localrev = max([int(m.group(1)) for m in revre.finditer(data)]+[0])
-            else:
-                log.warn("unrecognized .svn/entries format; skipping %s", base)
-                dirs[:] = []
-                continue
-            if base==os.curdir:
-                base_url = dirurl+'/'   # save the root url
-            elif not dirurl.startswith(base_url):
-                dirs[:] = []
-                continue    # not part of the same svn tree, skip it
-            revision = max(revision, localrev)
-
-        return str(revision or get_pkg_info_revision())
-
-
-
-
-
-
-
-    def find_sources(self):
-        """Generate SOURCES.txt manifest file"""
-        manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
-        mm = manifest_maker(self.distribution)
-        mm.manifest = manifest_filename
-        mm.run()
-        self.filelist = mm.filelist
-
-    def check_broken_egg_info(self):
-        bei = self.egg_name+'.egg-info'
-        if self.egg_base != os.curdir:
-            bei = os.path.join(self.egg_base, bei)
-        if os.path.exists(bei):
-            log.warn(
-                "-"*78+'\n'
-                "Note: Your current .egg-info directory has a '-' in its name;"
-                '\nthis will not work correctly with "setup.py develop".\n\n'
-                'Please rename %s to %s to correct this problem.\n'+'-'*78,
-                bei, self.egg_info
-            )
-            self.broken_egg_info = self.egg_info
-            self.egg_info = bei     # make it work for now
-
-class FileList(_FileList):
-    """File list that accepts only existing, platform-independent paths"""
-
-    def append(self, item):
-        if item.endswith('\r'):     # Fix older sdists built on Windows
-            item = item[:-1]
-        path = convert_path(item)
-
-        if sys.version_info >= (3,):
-            try:
-                if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
-                    self.files.append(path)
-            except UnicodeEncodeError:
-                # Accept UTF-8 filenames even if LANG=C
-                if os.path.exists(path.encode('utf-8')):
-                    self.files.append(path)
-                else:
-                    log.warn("'%s' not %s encodable -- skipping", path,
-                        sys.getfilesystemencoding())
-        else:
-            if os.path.exists(path):
-                self.files.append(path)
-
-
-
-
-
-
-
-
-class manifest_maker(sdist):
-
-    template = "MANIFEST.in"
-
-    def initialize_options (self):
-        self.use_defaults = 1
-        self.prune = 1
-        self.manifest_only = 1
-        self.force_manifest = 1
-
-    def finalize_options(self):
-        pass
-
-    def run(self):
-        self.filelist = FileList()
-        if not os.path.exists(self.manifest):
-            self.write_manifest()   # it must exist so it'll get in the list
-        self.filelist.findall()
-        self.add_defaults()
-        if os.path.exists(self.template):
-            self.read_template()
-        self.prune_file_list()
-        self.filelist.sort()
-        self.filelist.remove_duplicates()
-        self.write_manifest()
-
-    def write_manifest (self):
-        """Write the file list in 'self.filelist' (presumably as filled in
-        by 'add_defaults()' and 'read_template()') to the manifest file
-        named by 'self.manifest'.
-        """
-        # The manifest must be UTF-8 encodable. See #303.
-        if sys.version_info >= (3,):
-            files = []
-            for file in self.filelist.files:
-                try:
-                    file.encode("utf-8")
-                except UnicodeEncodeError:
-                    log.warn("'%s' not UTF-8 encodable -- skipping" % file)
-                else:
-                    files.append(file)
-            self.filelist.files = files
-
-        files = self.filelist.files
-        if os.sep!='/':
-            files = [f.replace(os.sep,'/') for f in files]
-        self.execute(write_file, (self.manifest, files),
-                     "writing manifest file '%s'" % self.manifest)
-
-    def warn(self, msg):    # suppress missing-file warnings from sdist
-        if not msg.startswith("standard file not found:"):
-            sdist.warn(self, msg)
-
-    def add_defaults(self):
-        sdist.add_defaults(self)
-        self.filelist.append(self.template)
-        self.filelist.append(self.manifest)
-        rcfiles = list(walk_revctrl())
-        if rcfiles:
-            self.filelist.extend(rcfiles)
-        elif os.path.exists(self.manifest):
-            self.read_manifest()
-        ei_cmd = self.get_finalized_command('egg_info')
-        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
-
-    def prune_file_list (self):
-        build = self.get_finalized_command('build')
-        base_dir = self.distribution.get_fullname()
-        self.filelist.exclude_pattern(None, prefix=build.build_base)
-        self.filelist.exclude_pattern(None, prefix=base_dir)
-        sep = re.escape(os.sep)
-        self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
-
-
-def write_file (filename, contents):
-    """Create a file with the specified name and write 'contents' (a
-    sequence of strings without line terminators) to it.
-    """
-    contents = "\n".join(contents)
-    if sys.version_info >= (3,):
-        contents = contents.encode("utf-8")
-    f = open(filename, "wb")        # always write POSIX-style manifest
-    f.write(contents)
-    f.close()
-
-
-
-
-
-
-
-
-
-
-
-
-
-def write_pkg_info(cmd, basename, filename):
-    log.info("writing %s", filename)
-    if not cmd.dry_run:
-        metadata = cmd.distribution.metadata
-        metadata.version, oldver = cmd.egg_version, metadata.version
-        metadata.name, oldname   = cmd.egg_name, metadata.name
-        try:
-            # write unescaped data to PKG-INFO, so older pkg_resources
-            # can still parse it
-            metadata.write_pkg_info(cmd.egg_info)
-        finally:
-            metadata.name, metadata.version = oldname, oldver
-
-        safe = getattr(cmd.distribution,'zip_safe',None)
-        import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
-
-def warn_depends_obsolete(cmd, basename, filename):
-    if os.path.exists(filename):
-        log.warn(
-            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
-            "Use the install_requires/extras_require setup() args instead."
-        )
-
-
-def write_requirements(cmd, basename, filename):
-    dist = cmd.distribution
-    data = ['\n'.join(yield_lines(dist.install_requires or ()))]
-    for extra,reqs in (dist.extras_require or {}).items():
-        data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
-    cmd.write_or_delete_file("requirements", filename, ''.join(data))
-
-def write_toplevel_names(cmd, basename, filename):
-    pkgs = dict.fromkeys(
-        [k.split('.',1)[0]
-            for k in cmd.distribution.iter_distribution_names()
-        ]
-    )
-    cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
-
-
-
-def overwrite_arg(cmd, basename, filename):
-    write_arg(cmd, basename, filename, True)
-
-def write_arg(cmd, basename, filename, force=False):
-    argname = os.path.splitext(basename)[0]
-    value = getattr(cmd.distribution, argname, None)
-    if value is not None:
-        value = '\n'.join(value)+'\n'
-    cmd.write_or_delete_file(argname, filename, value, force)
-
-def write_entries(cmd, basename, filename):
-    ep = cmd.distribution.entry_points
-
-    if isinstance(ep,basestring) or ep is None:
-        data = ep
-    elif ep is not None:
-        data = []
-        for section, contents in ep.items():
-            if not isinstance(contents,basestring):
-                contents = EntryPoint.parse_group(section, contents)
-                contents = '\n'.join(map(str,contents.values()))
-            data.append('[%s]\n%s\n\n' % (section,contents))
-        data = ''.join(data)
-
-    cmd.write_or_delete_file('entry points', filename, data, True)
-
-def get_pkg_info_revision():
-    # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
-    # a subversion revision
-    #
-    if os.path.exists('PKG-INFO'):
-        f = open('PKG-INFO','rU')
-        for line in f:
-            match = re.match(r"Version:.*-r(\d+)\s*$", line)
-            if match:
-                return int(match.group(1))
-        f.close()
-    return 0
-
-
-
-#
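
The write_requirements helper above assembles the egg's requires.txt: the plain install_requires lines come first, followed by one "[extra]" section per extras_require key. A minimal sketch of that layout, using hypothetical requirement values, could look like this::

    # Hypothetical inputs -- not taken from distribute itself.
    install_requires = ["requests>=2.0", "lxml"]
    extras_require = {"ssl": ["pyOpenSSL"], "docs": ["sphinx"]}

    lines = list(install_requires)
    for extra, reqs in sorted(extras_require.items()):
        lines.append("")                 # blank line before each section
        lines.append("[%s]" % extra)
        lines.extend(reqs)
    print("\n".join(lines))
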
diff --git a/vendor/distribute-0.6.34/setuptools/command/install.py b/vendor/distribute-0.6.34/setuptools/command/install.py
deleted file mode 100644
index 247c4f259c976db16d0a7b0e55bd69a75704c62d..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/install.py
+++ /dev/null
@@ -1,124 +0,0 @@
-import setuptools, sys, glob
-from distutils.command.install import install as _install
-from distutils.errors import DistutilsArgError
-
-class install(_install):
-    """Use easy_install to install the package, w/dependencies"""
-
-    user_options = _install.user_options + [
-        ('old-and-unmanageable', None, "Try not to use this!"),
-        ('single-version-externally-managed', None,
-            "used by system package builders to create 'flat' eggs"),
-    ]
-    boolean_options = _install.boolean_options + [
-        'old-and-unmanageable', 'single-version-externally-managed',
-    ]
-    new_commands = [
-        ('install_egg_info', lambda self: True),
-        ('install_scripts',  lambda self: True),
-    ]
-    _nc = dict(new_commands)
-
-    def initialize_options(self):
-        _install.initialize_options(self)
-        self.old_and_unmanageable = None
-        self.single_version_externally_managed = None
-        self.no_compile = None  # make DISTUTILS_DEBUG work right!
-
-    def finalize_options(self):
-        _install.finalize_options(self)
-        if self.root:
-            self.single_version_externally_managed = True
-        elif self.single_version_externally_managed:
-            if not self.root and not self.record:
-                raise DistutilsArgError(
-                    "You must specify --record or --root when building system"
-                    " packages"
-                )
-
-    def handle_extra_path(self):
-        if self.root or self.single_version_externally_managed:
-            # explicit backward-compatibility mode, allow extra_path to work
-            return _install.handle_extra_path(self)
-
-        # Ignore extra_path when installing an egg (or being run by another
-        # command without --root or --single-version-externally-managed)
-        self.path_file = None
-        self.extra_dirs = ''
-
-
-    def run(self):
-        # Explicit request for old-style install?  Just do it
-        if self.old_and_unmanageable or self.single_version_externally_managed:
-            return _install.run(self)
-
-        # Attempt to detect whether we were called from setup() or by another
-        # command.  If we were called by setup(), our caller will be the
-        # 'run_command' method in 'distutils.dist', and *its* caller will be
-        # the 'run_commands' method.  If we were called any other way, our
-        # immediate caller *might* be 'run_command', but it won't have been
-        # called by 'run_commands'.  This is slightly kludgy, but seems to
-        # work.
-        #
-        caller = sys._getframe(2)
-        caller_module = caller.f_globals.get('__name__','')
-        caller_name = caller.f_code.co_name
-
-        if caller_module != 'distutils.dist' or caller_name!='run_commands':
-            # We weren't called from the command line or setup(), so we
-            # should run in backward-compatibility mode to support bdist_*
-            # commands.
-            _install.run(self)
-        else:
-            self.do_egg_install()
-
-
-
-
-
-
-    def do_egg_install(self):
-
-        easy_install = self.distribution.get_command_class('easy_install')
-
-        cmd = easy_install(
-            self.distribution, args="x", root=self.root, record=self.record,
-        )
-        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
-        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
-
-        # pick up setup-dir .egg files only: no .egg-info
-        cmd.package_index.scan(glob.glob('*.egg'))
-
-        self.run_command('bdist_egg')
-        args = [self.distribution.get_command_obj('bdist_egg').egg_output]
-
-        if setuptools.bootstrap_install_from:
-            # Bootstrap self-installation of setuptools
-            args.insert(0, setuptools.bootstrap_install_from)
-
-        cmd.args = args
-        cmd.run()
-        setuptools.bootstrap_install_from = None
-
-# XXX Python 3.1 doesn't see _nc if this is inside the class
-install.sub_commands = [
-        cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
-    ] + install.new_commands
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
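
The run() method above decides between the legacy distutils install and an egg-based install by looking two frames up the call stack: only a call that arrives via distutils.dist's run_commands() is treated as a genuine command-line/setup() invocation. A rough stand-alone sketch of that check, assuming the same two-frame depth, is::

    import sys

    def called_from_setup(level=2):
        # Mirrors the frame inspection in install.run(): were we invoked
        # by the 'run_commands' method in distutils.dist?
        caller = sys._getframe(level)
        return (caller.f_globals.get('__name__', '') == 'distutils.dist'
                and caller.f_code.co_name == 'run_commands')
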
diff --git a/vendor/distribute-0.6.34/setuptools/command/install_egg_info.py b/vendor/distribute-0.6.34/setuptools/command/install_egg_info.py
deleted file mode 100644
index f44b34b555573061c7d0940e4031cee0414e99ec..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/install_egg_info.py
+++ /dev/null
@@ -1,125 +0,0 @@
-from setuptools import Command
-from setuptools.archive_util import unpack_archive
-from distutils import log, dir_util
-import os, shutil, pkg_resources
-
-class install_egg_info(Command):
-    """Install an .egg-info directory for the package"""
-
-    description = "Install an .egg-info directory for the package"
-
-    user_options = [
-        ('install-dir=', 'd', "directory to install to"),
-    ]
-
-    def initialize_options(self):
-        self.install_dir = None
-
-    def finalize_options(self):
-        self.set_undefined_options('install_lib',('install_dir','install_dir'))
-        ei_cmd = self.get_finalized_command("egg_info")
-        basename = pkg_resources.Distribution(
-            None, None, ei_cmd.egg_name, ei_cmd.egg_version
-        ).egg_name()+'.egg-info'
-        self.source = ei_cmd.egg_info
-        self.target = os.path.join(self.install_dir, basename)
-        self.outputs = [self.target]
-
-    def run(self):
-        self.run_command('egg_info')
-        target = self.target
-        if os.path.isdir(self.target) and not os.path.islink(self.target):
-            dir_util.remove_tree(self.target, dry_run=self.dry_run)
-        elif os.path.exists(self.target):
-            self.execute(os.unlink,(self.target,),"Removing "+self.target)
-        if not self.dry_run:
-            pkg_resources.ensure_directory(self.target)
-        self.execute(self.copytree, (),
-            "Copying %s to %s" % (self.source, self.target)
-        )
-        self.install_namespaces()
-
-    def get_outputs(self):
-        return self.outputs
-
-    def copytree(self):
-        # Copy the .egg-info tree to site-packages
-        def skimmer(src,dst):
-            # filter out source-control directories; note that 'src' is always
-            # a '/'-separated path, regardless of platform.  'dst' is a
-            # platform-specific path.
-            for skip in '.svn/','CVS/':
-                if src.startswith(skip) or '/'+skip in src:
-                    return None
-            self.outputs.append(dst)
-            log.debug("Copying %s to %s", src, dst)
-            return dst
-        unpack_archive(self.source, self.target, skimmer)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def install_namespaces(self):
-        nsp = self._get_all_ns_packages()
-        if not nsp: return
-        filename,ext = os.path.splitext(self.target)
-        filename += '-nspkg.pth'; self.outputs.append(filename)
-        log.info("Installing %s",filename)
-        if not self.dry_run:
-            f = open(filename,'wt')
-            for pkg in nsp:
-                # ensure pkg is not a unicode string under Python 2.7
-                pkg = str(pkg)
-                pth = tuple(pkg.split('.'))
-                trailer = '\n'
-                if '.' in pkg:
-                    trailer = (
-                        "; m and setattr(sys.modules[%r], %r, m)\n"
-                        % ('.'.join(pth[:-1]), pth[-1])
-                    )
-                f.write(
-                    "import sys,types,os; "
-                    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
-                        "*%(pth)r); "
-                    "ie = os.path.exists(os.path.join(p,'__init__.py')); "
-                    "m = not ie and "
-                        "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
-                    "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
-                    "(p not in mp) and mp.append(p)%(trailer)s"
-                    % locals()
-                )
-            f.close()
-
-    def _get_all_ns_packages(self):
-        nsp = {}
-        for pkg in self.distribution.namespace_packages or []:
-            pkg = pkg.split('.')
-            while pkg:
-                nsp['.'.join(pkg)] = 1
-                pkg.pop()
-        nsp=list(nsp)
-        nsp.sort()  # set up shorter names first
-        return nsp
-
-
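
_get_all_ns_packages above expands every declared namespace package into itself plus all of its parent packages, sorted so that shorter (parent) names are handled first by install_namespaces. A small sketch of that expansion::

    def expand_namespace_packages(namespace_packages):
        # 'zope.app.form' contributes 'zope', 'zope.app' and 'zope.app.form'.
        seen = {}
        for pkg in namespace_packages or []:
            parts = pkg.split('.')
            while parts:
                seen['.'.join(parts)] = 1
                parts.pop()
        return sorted(seen)

    # expand_namespace_packages(['zope.app.form'])
    # -> ['zope', 'zope.app', 'zope.app.form']
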
diff --git a/vendor/distribute-0.6.34/setuptools/command/install_lib.py b/vendor/distribute-0.6.34/setuptools/command/install_lib.py
deleted file mode 100644
index 82afa1421bed5d8b892ca0ddeb7a6282fba5146d..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/install_lib.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from distutils.command.install_lib import install_lib as _install_lib
-import os
-
-class install_lib(_install_lib):
-    """Don't add compiled flags to filenames of non-Python files"""
-
-    def _bytecode_filenames (self, py_filenames):
-        bytecode_files = []
-        for py_file in py_filenames:
-            if not py_file.endswith('.py'):
-                continue
-            if self.compile:
-                bytecode_files.append(py_file + "c")
-            if self.optimize > 0:
-                bytecode_files.append(py_file + "o")
-
-        return bytecode_files
-
-    def run(self):
-        self.build()
-        outfiles = self.install()
-        if outfiles is not None:
-            # always compile, in case we have any extension stubs to deal with
-            self.byte_compile(outfiles)
-
-    def get_exclusions(self):
-        exclude = {}
-        nsp = self.distribution.namespace_packages
-
-        if (nsp and self.get_finalized_command('install')
-               .single_version_externally_managed
-        ):
-            for pkg in nsp:
-                parts = pkg.split('.')
-                while parts:
-                    pkgdir = os.path.join(self.install_dir, *parts)
-                    for f in '__init__.py', '__init__.pyc', '__init__.pyo':
-                        exclude[os.path.join(pkgdir,f)] = 1
-                    parts.pop()
-        return exclude
-
-    def copy_tree(
-        self, infile, outfile,
-        preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
-    ):
-        assert preserve_mode and preserve_times and not preserve_symlinks
-        exclude = self.get_exclusions()
-
-        if not exclude:
-            return _install_lib.copy_tree(self, infile, outfile)
-
-        # Exclude namespace package __init__.py* files from the output
-
-        from setuptools.archive_util import unpack_directory
-        from distutils import log
-
-        outfiles = []
-
-        def pf(src, dst):
-            if dst in exclude:
-                log.warn("Skipping installation of %s (namespace package)",dst)
-                return False
-
-            log.info("copying %s -> %s", src, os.path.dirname(dst))
-            outfiles.append(dst)
-            return dst
-
-        unpack_directory(infile, outfile, pf)
-        return outfiles
-
-    def get_outputs(self):
-        outputs = _install_lib.get_outputs(self)
-        exclude = self.get_exclusions()
-        if exclude:
-            return [f for f in outputs if f not in exclude]
-        return outputs
-
-
-
-
-
-
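
get_exclusions() above keeps namespace-package __init__ modules out of the installed output when --single-version-externally-managed is in effect (the -nspkg.pth file written by install_egg_info takes over that job). A stand-alone sketch of the exclusion set it builds::

    import os

    def namespace_init_exclusions(install_dir, namespace_packages):
        # For every level of each namespace package, exclude its
        # __init__.py and the compiled variants from the install output.
        exclude = set()
        for pkg in namespace_packages or []:
            parts = pkg.split('.')
            while parts:
                pkgdir = os.path.join(install_dir, *parts)
                for f in ('__init__.py', '__init__.pyc', '__init__.pyo'):
                    exclude.add(os.path.join(pkgdir, f))
                parts.pop()
        return exclude
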
diff --git a/vendor/distribute-0.6.34/setuptools/command/install_scripts.py b/vendor/distribute-0.6.34/setuptools/command/install_scripts.py
deleted file mode 100644
index 8245603597854c264873fd4b229d037b0b95f448..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/install_scripts.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from distutils.command.install_scripts import install_scripts \
-     as _install_scripts
-from pkg_resources import Distribution, PathMetadata, ensure_directory
-import os
-from distutils import log
-
-class install_scripts(_install_scripts):
-    """Do normal script install, plus any egg_info wrapper scripts"""
-
-    def initialize_options(self):
-        _install_scripts.initialize_options(self)
-        self.no_ep = False
-
-    def run(self):
-        from setuptools.command.easy_install import get_script_args
-        from setuptools.command.easy_install import sys_executable
-
-        self.run_command("egg_info")
-        if self.distribution.scripts:
-            _install_scripts.run(self)  # run first to set up self.outfiles
-        else:
-            self.outfiles = []
-        if self.no_ep:
-            # don't install entry point scripts into .egg file!
-            return
-
-        ei_cmd = self.get_finalized_command("egg_info")
-        dist = Distribution(
-            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
-            ei_cmd.egg_name, ei_cmd.egg_version,
-        )
-        bs_cmd = self.get_finalized_command('build_scripts')
-        executable = getattr(bs_cmd,'executable',sys_executable)
-        is_wininst = getattr(
-            self.get_finalized_command("bdist_wininst"), '_is_running', False
-        )
-        for args in get_script_args(dist, executable, is_wininst):
-            self.write_script(*args)
-
-    def write_script(self, script_name, contents, mode="t", *ignored):
-        """Write an executable file to the scripts directory"""
-        from setuptools.command.easy_install import chmod, current_umask
-        log.info("Installing %s script to %s", script_name, self.install_dir)
-        target = os.path.join(self.install_dir, script_name)
-        self.outfiles.append(target)
-
-        mask = current_umask()
-        if not self.dry_run:
-            ensure_directory(target)
-            f = open(target,"w"+mode)
-            f.write(contents)
-            f.close()
-            chmod(target, 0777-mask)
-
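
write_script() above chmods each installed script to 0777 minus the process umask, using a current_umask() helper imported from easy_install. The usual idiom for reading the umask (there is no portable way to query it without briefly setting it) looks roughly like this sketch::

    import os

    def current_umask():
        # Set a throwaway umask, then immediately restore the old value.
        tmp = os.umask(0o022)
        os.umask(tmp)
        return tmp

    # e.g. the script mode under a 0o022 umask:
    # 0o777 - current_umask() == 0o755
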
diff --git a/vendor/distribute-0.6.34/setuptools/command/register.py b/vendor/distribute-0.6.34/setuptools/command/register.py
deleted file mode 100644
index 3b2e085907ecaf1dd6251fd83572f93567c0864c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/register.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from distutils.command.register import register as _register
-
-class register(_register):
-    __doc__ = _register.__doc__
-
-    def run(self):
-        # Make sure that we are using valid current name/version info
-        self.run_command('egg_info')
-        _register.run(self)
-
diff --git a/vendor/distribute-0.6.34/setuptools/command/rotate.py b/vendor/distribute-0.6.34/setuptools/command/rotate.py
deleted file mode 100644
index 11b6eae82b6a8bbb28f3908f4f9989840745d58c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/rotate.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-
-class rotate(Command):
-    """Delete older distributions"""
-
-    description = "delete older distributions, keeping N newest files"
-    user_options = [
-        ('match=',    'm', "patterns to match (required)"),
-        ('dist-dir=', 'd', "directory where the distributions are"),
-        ('keep=',     'k', "number of matching distributions to keep"),
-    ]
-
-    boolean_options = []
-
-    def initialize_options(self):
-        self.match = None
-        self.dist_dir = None
-        self.keep = None
-
-    def finalize_options(self):
-        if self.match is None:
-            raise DistutilsOptionError(
-                "Must specify one or more (comma-separated) match patterns "
-                "(e.g. '.zip' or '.egg')"
-            )
-        if self.keep is None:
-            raise DistutilsOptionError("Must specify number of files to keep")           
-        try:
-            self.keep = int(self.keep)
-        except ValueError:
-            raise DistutilsOptionError("--keep must be an integer")
-        if isinstance(self.match, basestring):
-            self.match = [
-                convert_path(p.strip()) for p in self.match.split(',')
-            ]
-        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
-
-    def run(self):
-        self.run_command("egg_info")
-        from glob import glob
-        for pattern in self.match:
-            pattern = self.distribution.get_name()+'*'+pattern
-            files = glob(os.path.join(self.dist_dir,pattern))
-            files = [(os.path.getmtime(f),f) for f in files]
-            files.sort()
-            files.reverse()
-
-            log.info("%d file(s) matching %s", len(files), pattern)
-            files = files[self.keep:]
-            for (t,f) in files:
-                log.info("Deleting %s", f)
-                if not self.dry_run:
-                    os.unlink(f)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
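
The rotate command above keeps the `--keep` newest distribution files for each match pattern, ordered by modification time, and deletes the rest. A small sketch of that selection, with hypothetical arguments::

    import os
    from glob import glob

    def files_to_delete(dist_dir, name, pattern, keep):
        # Newest first by mtime; everything past the first `keep`
        # entries is a candidate for deletion.
        matches = glob(os.path.join(dist_dir, name + '*' + pattern))
        matches.sort(key=os.path.getmtime, reverse=True)
        return matches[keep:]

    # files_to_delete('dist', 'mypkg', '.egg', keep=2)
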
diff --git a/vendor/distribute-0.6.34/setuptools/command/saveopts.py b/vendor/distribute-0.6.34/setuptools/command/saveopts.py
deleted file mode 100644
index 1180a440c920fd6ce1a0e55bb803b031fc301336..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/saveopts.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import distutils, os
-from setuptools import Command
-from setuptools.command.setopt import edit_config, option_base
-
-class saveopts(option_base):
-    """Save command-line options to a file"""
-
-    description = "save supplied options to setup.cfg or other config file"
-
-    def run(self):
-        dist = self.distribution
-        commands = dist.command_options.keys()
-        settings = {}
-
-        for cmd in commands:
-
-            if cmd=='saveopts':
-                continue    # don't save our own options!
-
-            for opt,(src,val) in dist.get_option_dict(cmd).items():
-                if src=="command line":
-                    settings.setdefault(cmd,{})[opt] = val
-
-        edit_config(self.filename, settings, self.dry_run)
-
diff --git a/vendor/distribute-0.6.34/setuptools/command/sdist.py b/vendor/distribute-0.6.34/setuptools/command/sdist.py
deleted file mode 100644
index 2fa3771aa6f27ca930367279e18026820f477b9e..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/sdist.py
+++ /dev/null
@@ -1,313 +0,0 @@
-from distutils.command.sdist import sdist as _sdist
-from distutils.util import convert_path
-from distutils import log
-import os, re, sys, pkg_resources
-from glob import glob
-
-READMES = ('README', 'README.rst', 'README.txt')
-
-entities = [
-    ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
-    ("&amp;", "&")
-]
-
-def unescape(data):
-    for old,new in entities:
-        data = data.replace(old,new)
-    return data
-
-def re_finder(pattern, postproc=None):
-    def find(dirname, filename):
-        f = open(filename,'rU')
-        data = f.read()
-        f.close()
-        for match in pattern.finditer(data):
-            path = match.group(1)
-            if postproc:
-                path = postproc(path)
-            yield joinpath(dirname,path)
-    return find
-
-def joinpath(prefix,suffix):
-    if not prefix:
-        return suffix
-    return os.path.join(prefix,suffix)
-
-
-
-
-
-
-
-
-
-
-def walk_revctrl(dirname=''):
-    """Find all files under revision control"""
-    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
-        for item in ep.load()(dirname):
-            yield item
-
-def _default_revctrl(dirname=''):
-    for path, finder in finders:
-        path = joinpath(dirname,path)
-        if os.path.isfile(path):
-            for path in finder(dirname,path):
-                if os.path.isfile(path):
-                    yield path
-                elif os.path.isdir(path):
-                    for item in _default_revctrl(path):
-                        yield item
-
-def externals_finder(dirname, filename):
-    """Find any 'svn:externals' directories"""
-    found = False
-    f = open(filename,'rt')
-    for line in iter(f.readline, ''):    # can't use direct iter!
-        parts = line.split()
-        if len(parts)==2:
-            kind,length = parts
-            data = f.read(int(length))
-            if kind=='K' and data=='svn:externals':
-                found = True
-            elif kind=='V' and found:
-                f.close()
-                break
-    else:
-        f.close()
-        return
-
-    for line in data.splitlines():
-        parts = line.split()
-        if parts:
-            yield joinpath(dirname, parts[0])
-
-
-entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
-
-def entries_finder(dirname, filename):
-    f = open(filename,'rU')
-    data = f.read()
-    f.close()
-    if data.startswith('10') or data.startswith('9') or data.startswith('8'):
-        for record in map(str.splitlines, data.split('\n\x0c\n')[1:]):
-            # subversion 1.6/1.5/1.4
-            if not record or len(record)>=6 and record[5]=="delete":
-                continue    # skip deleted
-            yield joinpath(dirname, record[0])
-    elif data.startswith('<?xml'):
-        for match in entries_pattern.finditer(data):
-            yield joinpath(dirname,unescape(match.group(1)))
-    else:
-        log.warn("unrecognized .svn/entries format in %s", os.path.abspath(dirname))
-
-
-finders = [
-    (convert_path('CVS/Entries'),
-        re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
-    (convert_path('.svn/entries'), entries_finder),
-    (convert_path('.svn/dir-props'), externals_finder),
-    (convert_path('.svn/dir-prop-base'), externals_finder),  # svn 1.4
-]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class sdist(_sdist):
-    """Smart sdist that finds anything supported by revision control"""
-
-    user_options = [
-        ('formats=', None,
-         "formats for source distribution (comma-separated list)"),
-        ('keep-temp', 'k',
-         "keep the distribution tree around after creating " +
-         "archive file(s)"),
-        ('dist-dir=', 'd',
-         "directory to put the source distribution archive(s) in "
-         "[default: dist]"),
-        ]
-
-    negative_opt = {}
-
-    def run(self):
-        self.run_command('egg_info')
-        ei_cmd = self.get_finalized_command('egg_info')
-        self.filelist = ei_cmd.filelist
-        self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
-        self.check_readme()
-
-        # Run sub commands
-        for cmd_name in self.get_sub_commands():
-            self.run_command(cmd_name)
-
-        # Call check_metadata only if no 'check' command
-        # (distutils <= 2.6)
-        import distutils.command
-        if 'check' not in distutils.command.__all__:
-            self.check_metadata()
-            
-        self.make_distribution()
-
-        dist_files = getattr(self.distribution,'dist_files',[])
-        for file in self.archive_files:
-            data = ('sdist', '', file)
-            if data not in dist_files:
-                dist_files.append(data)
-
-    def add_defaults(self):
-        standards = [READMES,
-                     self.distribution.script_name]
-        for fn in standards:
-            if isinstance(fn, tuple):
-                alts = fn
-                got_it = 0
-                for fn in alts:
-                    if os.path.exists(fn):
-                        got_it = 1
-                        self.filelist.append(fn)
-                        break
-
-                if not got_it:
-                    self.warn("standard file not found: should have one of " +
-                              ', '.join(alts))
-            else:
-                if os.path.exists(fn):
-                    self.filelist.append(fn)
-                else:
-                    self.warn("standard file '%s' not found" % fn)
-
-        optional = ['test/test*.py', 'setup.cfg']
-        for pattern in optional:
-            files = filter(os.path.isfile, glob(pattern))
-            if files:
-                self.filelist.extend(files)
-
-        # getting python files
-        if self.distribution.has_pure_modules():
-            build_py = self.get_finalized_command('build_py')
-            self.filelist.extend(build_py.get_source_files())
-            # This functionality is incompatible with include_package_data, and
-            # will in fact create an infinite recursion if include_package_data
-            # is True.  Use of include_package_data will imply that
-            # distutils-style automatic handling of package_data is disabled
-            if not self.distribution.include_package_data:
-                for _, src_dir, _, filenames in build_py.data_files:
-                    self.filelist.extend([os.path.join(src_dir, filename)
-                                          for filename in filenames])
-
-        if self.distribution.has_ext_modules():
-            build_ext = self.get_finalized_command('build_ext')
-            self.filelist.extend(build_ext.get_source_files())
-
-        if self.distribution.has_c_libraries():
-            build_clib = self.get_finalized_command('build_clib')
-            self.filelist.extend(build_clib.get_source_files())
-
-        if self.distribution.has_scripts():
-            build_scripts = self.get_finalized_command('build_scripts')
-            self.filelist.extend(build_scripts.get_source_files())
-
-    def __read_template_hack(self):
-        # This grody hack closes the template file (MANIFEST.in) if an
-        #  exception occurs during read_template.
-        # Doing so prevents an error when easy_install attempts to delete the
-        #  file.
-        try:
-            _sdist.read_template(self)
-        except:
-            sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
-            raise
-    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
-    #  has been fixed, so only override the method if we're using an earlier
-    #  Python.
-    if (
-            sys.version_info < (2,7,2)
-            or (3,0) <= sys.version_info < (3,1,4)
-            or (3,2) <= sys.version_info < (3,2,1)
-        ):
-        read_template = __read_template_hack
-
-    def check_readme(self):
-        for f in READMES:
-            if os.path.exists(f):
-                return
-        else:
-            self.warn(
-                "standard file not found: should have one of " +', '.join(READMES)
-            )
-
-
-    def make_release_tree(self, base_dir, files):
-        _sdist.make_release_tree(self, base_dir, files)
-
-        # Save any egg_info command line options used to create this sdist
-        dest = os.path.join(base_dir, 'setup.cfg')
-        if hasattr(os,'link') and os.path.exists(dest):
-            # unlink and re-copy, since it might be hard-linked, and
-            # we don't want to change the source version
-            os.unlink(dest)
-            self.copy_file('setup.cfg', dest)
-
-        self.get_finalized_command('egg_info').save_version_info(dest)
-
-    def _manifest_is_not_generated(self):
-        # check for special comment used in 2.7.1 and higher
-        if not os.path.isfile(self.manifest):
-            return False
-
-        fp = open(self.manifest, 'rbU')
-        try:
-            first_line = fp.readline()
-        finally:
-            fp.close()
-        return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
-
-    def read_manifest(self):
-        """Read the manifest file (named by 'self.manifest') and use it to
-        fill in 'self.filelist', the list of files to include in the source
-        distribution.
-        """
-        log.info("reading manifest file '%s'", self.manifest)
-        manifest = open(self.manifest, 'rbU')
-        for line in manifest:
-            # The manifest must contain UTF-8. See #303.
-            if sys.version_info >= (3,):
-                try:
-                    line = line.decode('UTF-8')
-                except UnicodeDecodeError:
-                    log.warn("%r not UTF-8 decodable -- skipping" % line)
-                    continue
-            # ignore comments and blank lines
-            line = line.strip()
-            if line.startswith('#') or not line:
-                continue
-            self.filelist.append(line)
-        manifest.close()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
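
walk_revctrl() above gathers sdist contents from any plugin registered under the 'setuptools.file_finders' entry-point group; the bundled _default_revctrl only understands CVS and Subversion metadata. A hypothetical finder for another VCS (this git example is purely illustrative; distribute 0.6 did not ship one) is just a callable that takes a directory and yields paths::

    import os
    import subprocess

    def git_file_finder(dirname=''):
        # Hypothetical 'setuptools.file_finders' plugin: yield every
        # file git knows about under `dirname`.
        try:
            out = subprocess.check_output(['git', 'ls-files'],
                                          cwd=dirname or '.')
        except (OSError, subprocess.CalledProcessError):
            return
        for line in out.decode('utf-8').splitlines():
            yield os.path.join(dirname, line) if dirname else line
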
diff --git a/vendor/distribute-0.6.34/setuptools/command/setopt.py b/vendor/distribute-0.6.34/setuptools/command/setopt.py
deleted file mode 100644
index dbf3a94ec126d98283956bd2eba5514000557f39..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/setopt.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import distutils, os
-from setuptools import Command
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import *
-
-__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
-
-
-def config_file(kind="local"):
-    """Get the filename of the distutils, local, global, or per-user config
-
-    `kind` must be one of "local", "global", or "user"
-    """
-    if kind=='local':
-        return 'setup.cfg'
-    if kind=='global':
-        return os.path.join(
-            os.path.dirname(distutils.__file__),'distutils.cfg'
-        )
-    if kind=='user':
-        dot = os.name=='posix' and '.' or ''
-        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
-    raise ValueError(
-        "config_file() type must be 'local', 'global', or 'user'", kind
-    )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def edit_config(filename, settings, dry_run=False):
-    """Edit a configuration file to include `settings`
-
-    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
-    command/section name.  A ``None`` value means to delete the entire section,
-    while a dictionary lists settings to be changed or deleted in that section.
-    A setting of ``None`` means to delete that setting.
-    """
-    from ConfigParser import RawConfigParser
-    log.debug("Reading configuration from %s", filename)
-    opts = RawConfigParser()
-    opts.read([filename])
-    for section, options in settings.items():
-        if options is None:
-            log.info("Deleting section [%s] from %s", section, filename)
-            opts.remove_section(section)
-        else:
-            if not opts.has_section(section):
-                log.debug("Adding new section [%s] to %s", section, filename)
-                opts.add_section(section)
-            for option,value in options.items():
-                if value is None:
-                    log.debug("Deleting %s.%s from %s",
-                        section, option, filename
-                    )
-                    opts.remove_option(section,option)
-                    if not opts.options(section):
-                        log.info("Deleting empty [%s] section from %s",
-                                  section, filename)
-                        opts.remove_section(section)
-                else:
-                    log.debug(
-                        "Setting %s.%s to %r in %s",
-                        section, option, value, filename
-                    )
-                    opts.set(section,option,value)
-
-    log.info("Writing %s", filename)
-    if not dry_run:
-        f = open(filename,'w'); opts.write(f); f.close()
-
-class option_base(Command):
-    """Abstract base class for commands that mess with config files"""
-    
-    user_options = [
-        ('global-config', 'g',
-                 "save options to the site-wide distutils.cfg file"),
-        ('user-config', 'u',
-                 "save options to the current user's pydistutils.cfg file"),
-        ('filename=', 'f',
-                 "configuration file to use (default=setup.cfg)"),
-    ]
-
-    boolean_options = [
-        'global-config', 'user-config',
-    ]    
-
-    def initialize_options(self):
-        self.global_config = None
-        self.user_config   = None
-        self.filename = None
-
-    def finalize_options(self):
-        filenames = []
-        if self.global_config:
-            filenames.append(config_file('global'))
-        if self.user_config:
-            filenames.append(config_file('user'))
-        if self.filename is not None:
-            filenames.append(self.filename)
-        if not filenames:
-            filenames.append(config_file('local'))
-        if len(filenames)>1:
-            raise DistutilsOptionError(
-                "Must specify only one configuration file option",
-                filenames
-            )
-        self.filename, = filenames    
-
-
-
-
-class setopt(option_base):
-    """Save command-line options to a file"""
-
-    description = "set an option in setup.cfg or another config file"
-
-    user_options = [
-        ('command=', 'c', 'command to set an option for'),
-        ('option=',  'o',  'option to set'),
-        ('set-value=',   's', 'value of the option'),
-        ('remove',   'r', 'remove (unset) the value'), 
-    ] + option_base.user_options
-
-    boolean_options = option_base.boolean_options + ['remove']
-
-    def initialize_options(self):
-        option_base.initialize_options(self)
-        self.command = None
-        self.option = None
-        self.set_value = None
-        self.remove = None
-
-    def finalize_options(self):
-        option_base.finalize_options(self)
-        if self.command is None or self.option is None:
-            raise DistutilsOptionError("Must specify --command *and* --option")
-        if self.set_value is None and not self.remove:
-            raise DistutilsOptionError("Must specify --set-value or --remove")
-
-    def run(self):
-        edit_config(
-            self.filename, {
-                self.command: {self.option.replace('-','_'):self.set_value}
-            },
-            self.dry_run
-        )
-
-
-
-
-
-
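
edit_config() above takes a mapping keyed by section name, where a value of None drops the whole section and an option set to None drops just that option; setopt and saveopts both funnel their changes through it. A sketch of such a settings mapping, with hypothetical values::

    # Hypothetical settings for edit_config('setup.cfg', settings):
    settings = {
        'easy_install': {'index_url': 'https://example.invalid/simple'},
        'bdist_rpm': None,               # delete the whole [bdist_rpm] section
        'build': {'build_base': None},   # delete only this one option
    }
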
diff --git a/vendor/distribute-0.6.34/setuptools/command/test.py b/vendor/distribute-0.6.34/setuptools/command/test.py
deleted file mode 100644
index a02ac1424068b39efad4a180ede916f1fe183d79..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/test.py
+++ /dev/null
@@ -1,198 +0,0 @@
-from setuptools import Command
-from distutils.errors import DistutilsOptionError
-import sys
-from pkg_resources import *
-from pkg_resources import _namespace_packages
-from unittest import TestLoader, main
-
-class ScanningLoader(TestLoader):
-
-    def loadTestsFromModule(self, module):
-        """Return a suite of all tests cases contained in the given module
-
-        If the module is a package, load tests from all the modules in it.
-        If the module has an ``additional_tests`` function, call it and add
-        the return value to the tests.
-        """
-        tests = []
-        if module.__name__!='setuptools.tests.doctest':  # ugh
-            tests.append(TestLoader.loadTestsFromModule(self,module))
-
-        if hasattr(module, "additional_tests"):
-            tests.append(module.additional_tests())
-
-        if hasattr(module, '__path__'):
-            for file in resource_listdir(module.__name__, ''):
-                if file.endswith('.py') and file!='__init__.py':
-                    submodule = module.__name__+'.'+file[:-3]
-                else:
-                    if resource_exists(
-                        module.__name__, file+'/__init__.py'
-                    ):
-                        submodule = module.__name__+'.'+file
-                    else:
-                        continue
-                tests.append(self.loadTestsFromName(submodule))
-
-        if len(tests)!=1:
-            return self.suiteClass(tests)
-        else:
-            return tests[0] # don't create a nested suite for only one return
-
-
-class test(Command):
-
-    """Command to run unit tests after in-place build"""
-
-    description = "run unit tests after in-place build"
-
-    user_options = [
-        ('test-module=','m', "Run 'test_suite' in specified module"),
-        ('test-suite=','s',
-            "Test suite to run (e.g. 'some_module.test_suite')"),
-    ]
-
-    def initialize_options(self):
-        self.test_suite = None
-        self.test_module = None
-        self.test_loader = None
-
-
-    def finalize_options(self):
-
-        if self.test_suite is None:
-            if self.test_module is None:
-                self.test_suite = self.distribution.test_suite
-            else:
-                self.test_suite = self.test_module+".test_suite"
-        elif self.test_module:
-            raise DistutilsOptionError(
-                "You may specify a module or a suite, but not both"
-            )
-
-        self.test_args = [self.test_suite]
-
-        if self.verbose:
-            self.test_args.insert(0,'--verbose')
-        if self.test_loader is None:
-            self.test_loader = getattr(self.distribution,'test_loader',None)
-        if self.test_loader is None:
-            self.test_loader = "setuptools.command.test:ScanningLoader"
-
-
-
-    def with_project_on_sys_path(self, func):
-        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
-            # If we run 2to3 we cannot do this in place:
-
-            # Ensure metadata is up-to-date
-            self.reinitialize_command('build_py', inplace=0)
-            self.run_command('build_py')
-            bpy_cmd = self.get_finalized_command("build_py")
-            build_path = normalize_path(bpy_cmd.build_lib)
-
-            # Build extensions
-            self.reinitialize_command('egg_info', egg_base=build_path)
-            self.run_command('egg_info')
-
-            self.reinitialize_command('build_ext', inplace=0)
-            self.run_command('build_ext')
-        else:
-            # Without 2to3, building in place works fine:
-            self.run_command('egg_info')
-
-            # Build extensions in-place
-            self.reinitialize_command('build_ext', inplace=1)
-            self.run_command('build_ext')
-
-        ei_cmd = self.get_finalized_command("egg_info")
-
-        old_path = sys.path[:]
-        old_modules = sys.modules.copy()
-
-        try:
-            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
-            working_set.__init__()
-            add_activation_listener(lambda dist: dist.activate())
-            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
-            func()
-        finally:
-            sys.path[:] = old_path
-            sys.modules.clear()
-            sys.modules.update(old_modules)
-            working_set.__init__()
-
-
-    def run(self):
-        if self.distribution.install_requires:
-            self.distribution.fetch_build_eggs(self.distribution.install_requires)
-        if self.distribution.tests_require:
-            self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
-        if self.test_suite:
-            cmd = ' '.join(self.test_args)
-            if self.dry_run:
-                self.announce('skipping "unittest %s" (dry run)' % cmd)
-            else:
-                self.announce('running "unittest %s"' % cmd)
-                self.with_project_on_sys_path(self.run_tests)
-
-
-    def run_tests(self):
-        import unittest
-
-        # Purge modules under test from sys.modules. The test loader will
-        # re-import them from the build location. Required when 2to3 is used
-        # with namespace packages.
-        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
-            module = self.test_args[-1].split('.')[0]
-            if module in _namespace_packages:
-                del_modules = []
-                if module in sys.modules:
-                    del_modules.append(module)
-                module += '.'
-                for name in sys.modules:
-                    if name.startswith(module):
-                        del_modules.append(name)
-                map(sys.modules.__delitem__, del_modules)
-
-        loader_ep = EntryPoint.parse("x="+self.test_loader)
-        loader_class = loader_ep.load(require=False)
-        cks = loader_class()
-        unittest.main(
-            None, None, [unittest.__file__]+self.test_args,
-            testLoader = cks
-        )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
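
run_tests() above resolves self.test_loader through EntryPoint.parse('x=' + spec), i.e. the loader is named with the usual 'package.module:Attribute' syntax. A simplified sketch of that resolution (the real EntryPoint also handles dotted attribute paths and extras)::

    def resolve_loader(spec):
        # 'unittest:TestLoader' -> the TestLoader class.
        modname, _, attr = spec.partition(':')
        module = __import__(modname, fromlist=['__name__'])
        return getattr(module, attr) if attr else module

    # resolve_loader('setuptools.command.test:ScanningLoader')
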
diff --git a/vendor/distribute-0.6.34/setuptools/command/upload.py b/vendor/distribute-0.6.34/setuptools/command/upload.py
deleted file mode 100644
index 21b9615c42ac752723e2223b66e40b63cf9f9521..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/upload.py
+++ /dev/null
@@ -1,185 +0,0 @@
-"""distutils.command.upload
-
-Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
-
-from distutils.errors import *
-from distutils.core import Command
-from distutils.spawn import spawn
-from distutils import log
-try:
-    from hashlib import md5
-except ImportError:
-    from md5 import md5
-import os
-import socket
-import platform
-import ConfigParser
-import httplib
-import base64
-import urlparse
-import cStringIO as StringIO
-
-class upload(Command):
-
-    description = "upload binary package to PyPI"
-
-    DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
-
-    user_options = [
-        ('repository=', 'r',
-         "url of repository [default: %s]" % DEFAULT_REPOSITORY),
-        ('show-response', None,
-         'display full response text from server'),
-        ('sign', 's',
-         'sign files to upload using gpg'),
-        ('identity=', 'i', 'GPG identity used to sign files'),
-        ]
-    boolean_options = ['show-response', 'sign']
-
-    def initialize_options(self):
-        self.username = ''
-        self.password = ''
-        self.repository = ''
-        self.show_response = 0
-        self.sign = False
-        self.identity = None
-
-    def finalize_options(self):
-        if self.identity and not self.sign:
-            raise DistutilsOptionError(
-                "Must use --sign for --identity to have meaning"
-            )
-        if os.environ.has_key('HOME'):
-            rc = os.path.join(os.environ['HOME'], '.pypirc')
-            if os.path.exists(rc):
-                self.announce('Using PyPI login from %s' % rc)
-                config = ConfigParser.ConfigParser({
-                        'username':'',
-                        'password':'',
-                        'repository':''})
-                config.read(rc)
-                if not self.repository:
-                    self.repository = config.get('server-login', 'repository')
-                if not self.username:
-                    self.username = config.get('server-login', 'username')
-                if not self.password:
-                    self.password = config.get('server-login', 'password')
-        if not self.repository:
-            self.repository = self.DEFAULT_REPOSITORY
-
-    def run(self):
-        if not self.distribution.dist_files:
-            raise DistutilsOptionError("No dist file created in earlier command")
-        for command, pyversion, filename in self.distribution.dist_files:
-            self.upload_file(command, pyversion, filename)
-
-    def upload_file(self, command, pyversion, filename):
-        # Sign if requested
-        if self.sign:
-            gpg_args = ["gpg", "--detach-sign", "-a", filename]
-            if self.identity:
-                gpg_args[2:2] = ["--local-user", self.identity]
-            spawn(gpg_args,
-                  dry_run=self.dry_run)
-
-        # Fill in the data
-        f = open(filename,'rb')
-        content = f.read()
-        f.close()
-        basename = os.path.basename(filename)
-        comment = ''
-        if command=='bdist_egg' and self.distribution.has_ext_modules():
-            comment = "built on %s" % platform.platform(terse=1)
-        data = {
-            ':action':'file_upload',
-            'protocol_version':'1',
-            'name':self.distribution.get_name(),
-            'version':self.distribution.get_version(),
-            'content':(basename,content),
-            'filetype':command,
-            'pyversion':pyversion,
-            'md5_digest':md5(content).hexdigest(),
-            }
-        if command == 'bdist_rpm':
-            dist, version, id = platform.dist()
-            if dist:
-                comment = 'built for %s %s' % (dist, version)
-        elif command == 'bdist_dumb':
-            comment = 'built for %s' % platform.platform(terse=1)
-        data['comment'] = comment
-
-        if self.sign:
-            asc_file = open(filename + ".asc")
-            data['gpg_signature'] = (os.path.basename(filename) + ".asc", asc_file.read())
-            asc_file.close()
-
-        # set up the authentication
-        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
-
-        # Build up the MIME payload for the POST data
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = '\n--' + boundary
-        end_boundary = sep_boundary + '--'
-        body = StringIO.StringIO()
-        for key, value in data.items():
-            # handle multiple entries for the same name
-            if type(value) != type([]):
-                value = [value]
-            for value in value:
-                if type(value) is tuple:
-                    fn = ';filename="%s"' % value[0]
-                    value = value[1]
-                else:
-                    fn = ""
-                value = str(value)
-                body.write(sep_boundary)
-                body.write('\nContent-Disposition: form-data; name="%s"'%key)
-                body.write(fn)
-                body.write("\n\n")
-                body.write(value)
-                if value and value[-1] == '\r':
-                    body.write('\n')  # write an extra newline (lurve Macs)
-        body.write(end_boundary)
-        body.write("\n")
-        body = body.getvalue()
-
-        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
-
-        # build the Request
-        # We can't use urllib2 since we need to send the Basic
-        # auth right with the first request
-        schema, netloc, url, params, query, fragments = \
-            urlparse.urlparse(self.repository)
-        assert not params and not query and not fragments
-        if schema == 'http':
-            http = httplib.HTTPConnection(netloc)
-        elif schema == 'https':
-            http = httplib.HTTPSConnection(netloc)
-        else:
-            raise AssertionError, "unsupported schema "+schema
-
-        data = ''
-        loglevel = log.INFO
-        try:
-            http.connect()
-            http.putrequest("POST", url)
-            http.putheader('Content-type',
-                           'multipart/form-data; boundary=%s'%boundary)
-            http.putheader('Content-length', str(len(body)))
-            http.putheader('Authorization', auth)
-            http.endheaders()
-            http.send(body)
-        except socket.error, e:
-            self.announce(str(e), log.ERROR)
-            return
-
-        r = http.getresponse()
-        if r.status == 200:
-            self.announce('Server response (%s): %s' % (r.status, r.reason),
-                          log.INFO)
-        else:
-            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
-                          log.ERROR)
-        if self.show_response:
-            print '-'*75, r.read(), '-'*75
-
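
upload_file() above builds the multipart/form-data body by hand and sends HTTP Basic credentials with the very first request, which is why it talks to httplib directly instead of going through urllib2. The Authorization header it constructs amounts to the following sketch (written with base64.b64encode rather than the long-deprecated encodestring)::

    import base64

    def basic_auth_header(username, password):
        # 'Basic ' + base64 of 'user:password'.
        creds = ('%s:%s' % (username, password)).encode('utf-8')
        return 'Basic ' + base64.b64encode(creds).decode('ascii')
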
diff --git a/vendor/distribute-0.6.34/setuptools/command/upload_docs.py b/vendor/distribute-0.6.34/setuptools/command/upload_docs.py
deleted file mode 100644
index 1d5a7445121ed3ec7b8bcb02420ddcdc1bb24276..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/command/upload_docs.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# -*- coding: utf-8 -*-
-"""upload_docs
-
-Implements a Distutils 'upload_docs' subcommand (upload documentation to
-PyPI's packages.python.org).
-"""
-
-import os
-import socket
-import zipfile
-import httplib
-import urlparse
-import tempfile
-import sys
-import shutil
-
-from base64 import standard_b64encode
-from pkg_resources import iter_entry_points
-
-from distutils import log
-from distutils.errors import DistutilsOptionError
-
-try:
-    from distutils.command.upload import upload
-except ImportError:
-    from setuptools.command.upload import upload
-
-
-# This is not just a replacement for byte literals
-# but works as a general purpose encoder
-def b(s, encoding='utf-8'):
-    if isinstance(s, unicode):
-        return s.encode(encoding)
-    return s
-
-
-class upload_docs(upload):
-
-    description = 'Upload documentation to PyPI'
-
-    user_options = [
-        ('repository=', 'r',
-         "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
-        ('show-response', None,
-         'display full response text from server'),
-        ('upload-dir=', None, 'directory to upload'),
-        ]
-    boolean_options = upload.boolean_options
-
-    def has_sphinx(self):
-        if self.upload_dir is None:
-            for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
-                return True
-
-    sub_commands = [('build_sphinx', has_sphinx)]
-
-    def initialize_options(self):
-        upload.initialize_options(self)
-        self.upload_dir = None
-        self.target_dir = None
-
-    def finalize_options(self):
-        upload.finalize_options(self)
-        if self.upload_dir is None:
-            if self.has_sphinx():
-                build_sphinx = self.get_finalized_command('build_sphinx')
-                self.target_dir = build_sphinx.builder_target_dir
-            else:
-                build = self.get_finalized_command('build')
-                self.target_dir = os.path.join(build.build_base, 'docs')
-        else:
-            self.ensure_dirname('upload_dir')
-            self.target_dir = self.upload_dir
-        self.announce('Using upload directory %s' % self.target_dir)
-
-    def create_zipfile(self, filename):
-        zip_file = zipfile.ZipFile(filename, "w")
-        try:
-            self.mkpath(self.target_dir)  # just in case
-            for root, dirs, files in os.walk(self.target_dir):
-                if root == self.target_dir and not files:
-                    raise DistutilsOptionError(
-                        "no files found in upload directory '%s'"
-                        % self.target_dir)
-                for name in files:
-                    full = os.path.join(root, name)
-                    relative = root[len(self.target_dir):].lstrip(os.path.sep)
-                    dest = os.path.join(relative, name)
-                    zip_file.write(full, dest)
-        finally:
-            zip_file.close()
-
-    def run(self):
-        # Run sub commands
-        for cmd_name in self.get_sub_commands():
-            self.run_command(cmd_name)
-
-        tmp_dir = tempfile.mkdtemp()
-        name = self.distribution.metadata.get_name()
-        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
-        try:
-            self.create_zipfile(zip_file)
-            self.upload_file(zip_file)
-        finally:
-            shutil.rmtree(tmp_dir)
-
-    def upload_file(self, filename):
-        f = open(filename, 'rb')
-        content = f.read()
-        f.close()
-        meta = self.distribution.metadata
-        data = {
-            ':action': 'doc_upload',
-            'name': meta.get_name(),
-            'content': (os.path.basename(filename), content),
-        }
-        # set up the authentication
-        credentials = b(self.username + ':' + self.password)
-        credentials = standard_b64encode(credentials)
-        if sys.version_info >= (3,):
-            credentials = credentials.decode('ascii')
-        auth = "Basic " + credentials
-
-        # Build up the MIME payload for the POST data
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = b('\n--') + b(boundary)
-        end_boundary = sep_boundary + b('--')
-        body = []
-        for key, values in data.iteritems():
-            title = '\nContent-Disposition: form-data; name="%s"' % key
-            # handle multiple entries for the same name
-            if type(values) != type([]):
-                values = [values]
-            for value in values:
-                if type(value) is tuple:
-                    title += '; filename="%s"' % value[0]
-                    value = value[1]
-                else:
-                    value = b(value)
-                body.append(sep_boundary)
-                body.append(b(title))
-                body.append(b("\n\n"))
-                body.append(value)
-                if value and value[-1:] == b('\r'):
-                    body.append(b('\n'))  # write an extra newline (lurve Macs)
-        body.append(end_boundary)
-        body.append(b("\n"))
-        body = b('').join(body)
-
-        self.announce("Submitting documentation to %s" % (self.repository),
-                      log.INFO)
-
-        # build the Request
-        # We can't use urllib2 since we need to send the Basic
-        # auth right with the first request
-        schema, netloc, url, params, query, fragments = \
-            urlparse.urlparse(self.repository)
-        assert not params and not query and not fragments
-        if schema == 'http':
-            conn = httplib.HTTPConnection(netloc)
-        elif schema == 'https':
-            conn = httplib.HTTPSConnection(netloc)
-        else:
-            raise AssertionError("unsupported schema "+schema)
-
-        data = ''
-        loglevel = log.INFO
-        try:
-            conn.connect()
-            conn.putrequest("POST", url)
-            conn.putheader('Content-type',
-                           'multipart/form-data; boundary=%s'%boundary)
-            conn.putheader('Content-length', str(len(body)))
-            conn.putheader('Authorization', auth)
-            conn.endheaders()
-            conn.send(body)
-        except socket.error, e:
-            self.announce(str(e), log.ERROR)
-            return
-
-        r = conn.getresponse()
-        if r.status == 200:
-            self.announce('Server response (%s): %s' % (r.status, r.reason),
-                          log.INFO)
-        elif r.status == 301:
-            location = r.getheader('Location')
-            if location is None:
-                location = 'http://packages.python.org/%s/' % meta.get_name()
-            self.announce('Upload successful. Visit %s' % location,
-                          log.INFO)
-        else:
-            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
-                          log.ERROR)
-        if self.show_response:
-            print '-'*75, r.read(), '-'*75
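
# A standalone sketch of the Basic auth header that upload_file() above
# builds; 'user' and 'secret' are placeholder credentials.
from base64 import standard_b64encode

credentials = ('user' + ':' + 'secret').encode('ascii')
auth = "Basic " + standard_b64encode(credentials).decode('ascii')
# auth == 'Basic dXNlcjpzZWNyZXQ='
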
diff --git a/vendor/distribute-0.6.34/setuptools/depends.py b/vendor/distribute-0.6.34/setuptools/depends.py
deleted file mode 100644
index 4b7b343760a38c7b3d979c09a9d747be146879b5..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/depends.py
+++ /dev/null
@@ -1,246 +0,0 @@
-from __future__ import generators
-import sys, imp, marshal
-from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
-from distutils.version import StrictVersion, LooseVersion
-
-__all__ = [
-    'Require', 'find_module', 'get_module_constant', 'extract_constant'
-]
-
-class Require:
-    """A prerequisite to building or installing a distribution"""
-
-    def __init__(self,name,requested_version,module,homepage='',
-        attribute=None,format=None
-    ):
-
-        if format is None and requested_version is not None:
-            format = StrictVersion
-
-        if format is not None:
-            requested_version = format(requested_version)
-            if attribute is None:
-                attribute = '__version__'
-
-        self.__dict__.update(locals())
-        del self.self
-
-
-    def full_name(self):
-        """Return full package/distribution name, w/version"""
-        if self.requested_version is not None:
-            return '%s-%s' % (self.name,self.requested_version)
-        return self.name
-
-
-    def version_ok(self,version):
-        """Is 'version' sufficiently up-to-date?"""
-        return self.attribute is None or self.format is None or \
-            str(version)<>"unknown" and version >= self.requested_version
-
-
-    def get_version(self, paths=None, default="unknown"):
-
-        """Get version number of installed module, 'None', or 'default'
-
-        Search 'paths' for module.  If not found, return 'None'.  If found,
-        return the extracted version attribute, or 'default' if no version
-        attribute was specified, or the value cannot be determined without
-        importing the module.  The version is formatted according to the
-        requirement's version format (if any), unless it is 'None' or the
-        supplied 'default'.
-        """
-
-        if self.attribute is None:
-            try:
-                f,p,i = find_module(self.module,paths)
-                if f: f.close()
-                return default
-            except ImportError:
-                return None
-
-        v = get_module_constant(self.module,self.attribute,default,paths)
-
-        if v is not None and v is not default and self.format is not None:
-            return self.format(v)
-
-        return v
-
-
-    def is_present(self,paths=None):
-        """Return true if dependency is present on 'paths'"""
-        return self.get_version(paths) is not None
-
-
-    def is_current(self,paths=None):
-        """Return true if dependency is present and up-to-date on 'paths'"""
-        version = self.get_version(paths)
-        if version is None:
-            return False
-        return self.version_ok(version)
-
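# A hypothetical usage sketch of the Require API defined above; the
# 'Docutils' name, version, and module values are illustrative placeholders.
from setuptools.depends import Require

req = Require('Docutils', '0.3', 'docutils')
print(req.full_name())    # 'Docutils-0.3'
print(req.is_present())   # True if a docutils module can be found on sys.path
print(req.is_current())   # True if docutils.__version__ satisfies >= 0.3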
-
-def _iter_code(code):
-
-    """Yield '(op,arg)' pair for each operation in code object 'code'"""
-
-    from array import array
-    from dis import HAVE_ARGUMENT, EXTENDED_ARG
-
-    bytes = array('b',code.co_code)
-    eof = len(code.co_code)
-
-    ptr = 0
-    extended_arg = 0
-
-    while ptr<eof:
-
-        op = bytes[ptr]
-
-        if op>=HAVE_ARGUMENT:
-
-            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
-            ptr += 3
-
-            if op==EXTENDED_ARG:
-                extended_arg = arg * 65536L
-                continue
-
-        else:
-            arg = None
-            ptr += 1
-
-        yield op,arg
-
-
-
-
-
-
-
-
-
-
-def find_module(module, paths=None):
-    """Just like 'imp.find_module()', but with package support"""
-
-    parts = module.split('.')
-
-    while parts:
-        part = parts.pop(0)
-        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
-
-        if kind==PKG_DIRECTORY:
-            parts = parts or ['__init__']
-            paths = [path]
-
-        elif parts:
-            raise ImportError("Can't find %r in %s" % (parts,module))
-
-    return info
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def get_module_constant(module, symbol, default=-1, paths=None):
-
-    """Find 'module' by searching 'paths', and extract 'symbol'
-
-    Return 'None' if 'module' does not exist on 'paths', or it does not define
-    'symbol'.  If the module defines 'symbol' as a constant, return the
-    constant.  Otherwise, return 'default'."""
-
-    try:
-        f, path, (suffix,mode,kind) = find_module(module,paths)
-    except ImportError:
-        # Module doesn't exist
-        return None
-
-    try:
-        if kind==PY_COMPILED:
-            f.read(8)   # skip magic & date
-            code = marshal.load(f)
-        elif kind==PY_FROZEN:
-            code = imp.get_frozen_object(module)
-        elif kind==PY_SOURCE:
-            code = compile(f.read(), path, 'exec')
-        else:
-            # Not something we can parse; we'll have to import it.  :(
-            if module not in sys.modules:
-                imp.load_module(module,f,path,(suffix,mode,kind))
-            return getattr(sys.modules[module],symbol,None)
-
-    finally:
-        if f:
-            f.close()
-
-    return extract_constant(code,symbol,default)
-
-
-
-
-
-
-
-
-def extract_constant(code,symbol,default=-1):
-    """Extract the constant value of 'symbol' from 'code'
-
-    If the name 'symbol' is bound to a constant value by the Python code
-    object 'code', return that value.  If 'symbol' is bound to an expression,
-    return 'default'.  Otherwise, return 'None'.
-
-    Return value is based on the first assignment to 'symbol'.  'symbol' must
-    be a global, or at least a non-"fast" local in the code block.  That is,
-    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
-    must be present in 'code.co_names'.
-    """
-
-    if symbol not in code.co_names:
-        # name's not there, can't possibly be an assignment
-        return None
-
-    name_idx = list(code.co_names).index(symbol)
-
-    STORE_NAME = 90
-    STORE_GLOBAL = 97
-    LOAD_CONST = 100
-
-    const = default
-
-    for op, arg in _iter_code(code):
-
-        if op==LOAD_CONST:
-            const = code.co_consts[arg]
-        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
-            return const
-        else:
-            const = default
-            
-if sys.platform.startswith('java') or sys.platform == 'cli':
-    # XXX it'd be better to test assertions about bytecode instead...
-    del extract_constant, get_module_constant
-    __all__.remove('extract_constant')
-    __all__.remove('get_module_constant')
-
-
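# A minimal sketch of extract_constant() from this module, assuming CPython 2
# bytecode (the three-byte opcode layout that _iter_code() walks):
from setuptools.depends import extract_constant

code = compile("__version__ = '1.0'\n", '<example>', 'exec')
print(extract_constant(code, '__version__'))   # '1.0'
print(extract_constant(code, '__author__'))    # None -- name never bound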
diff --git a/vendor/distribute-0.6.34/setuptools/dist.py b/vendor/distribute-0.6.34/setuptools/dist.py
deleted file mode 100644
index 998a4dbe8b963427d23780584e1dca7e35e61fcf..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/dist.py
+++ /dev/null
@@ -1,855 +0,0 @@
-__all__ = ['Distribution']
-
-import re
-from distutils.core import Distribution as _Distribution
-from setuptools.depends import Require
-from setuptools.command.install import install
-from setuptools.command.sdist import sdist
-from setuptools.command.install_lib import install_lib
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
-import os, distutils.log
-
-def _get_unpatched(cls):
-    """Protect against re-patching the distutils if reloaded
-
-    Also ensures that no other distutils extension monkeypatched the distutils
-    first.
-    """
-    while cls.__module__.startswith('setuptools'):
-        cls, = cls.__bases__
-    if not cls.__module__.startswith('distutils'):
-        raise AssertionError(
-            "distutils has already been patched by %r" % cls
-        )
-    return cls
-
-_Distribution = _get_unpatched(_Distribution)
-
-sequence = tuple, list
-
-def check_importable(dist, attr, value):
-    try:
-        ep = pkg_resources.EntryPoint.parse('x='+value)
-        assert not ep.extras
-    except (TypeError,ValueError,AttributeError,AssertionError):
-        raise DistutilsSetupError(
-            "%r must be importable 'module:attrs' string (got %r)"
-            % (attr,value)
-        )
-
-
-def assert_string_list(dist, attr, value):
-    """Verify that value is a string list or None"""
-    try:
-        assert ''.join(value)!=value
-    except (TypeError,ValueError,AttributeError,AssertionError):
-        raise DistutilsSetupError(
-            "%r must be a list of strings (got %r)" % (attr,value)
-        )
-
-def check_nsp(dist, attr, value):
-    """Verify that namespace packages are valid"""
-    assert_string_list(dist,attr,value)
-    for nsp in value:
-        if not dist.has_contents_for(nsp):
-            raise DistutilsSetupError(
-                "Distribution contains no modules or packages for " +
-                "namespace package %r" % nsp
-            )
-        if '.' in nsp:
-            parent = '.'.join(nsp.split('.')[:-1])
-            if parent not in value:
-                distutils.log.warn(
-                    "%r is declared as a package namespace, but %r is not:"
-                    " please correct this in setup.py", nsp, parent
-                )
-
-def check_extras(dist, attr, value):
-    """Verify that extras_require mapping is valid"""
-    try:
-        for k,v in value.items():
-            list(pkg_resources.parse_requirements(v))
-    except (TypeError,ValueError,AttributeError):
-        raise DistutilsSetupError(
-            "'extras_require' must be a dictionary whose values are "
-            "strings or lists of strings containing valid project/version "
-            "requirement specifiers."
-        )
-
-
-
-
-def assert_bool(dist, attr, value):
-    """Verify that value is True, False, 0, or 1"""
-    if bool(value) != value:
-        raise DistutilsSetupError(
-            "%r must be a boolean value (got %r)" % (attr,value)
-        )
-def check_requirements(dist, attr, value):
-    """Verify that install_requires is a valid requirements list"""
-    try:
-        list(pkg_resources.parse_requirements(value))
-    except (TypeError,ValueError):
-        raise DistutilsSetupError(
-            "%r must be a string or list of strings "
-            "containing valid project/version requirement specifiers" % (attr,)
-        )
-def check_entry_points(dist, attr, value):
-    """Verify that entry_points map is parseable"""
-    try:
-        pkg_resources.EntryPoint.parse_map(value)
-    except ValueError, e:
-        raise DistutilsSetupError(e)
-
-def check_test_suite(dist, attr, value):
-    if not isinstance(value,basestring):
-        raise DistutilsSetupError("test_suite must be a string")
-
-def check_package_data(dist, attr, value):
-    """Verify that value is a dictionary of package names to glob lists"""
-    if isinstance(value,dict):
-        for k,v in value.items():
-            if not isinstance(k,str): break
-            try: iter(v)
-            except TypeError:
-                break
-        else:
-            return
-    raise DistutilsSetupError(
-        attr+" must be a dictionary mapping package names to lists of "
-        "wildcard patterns"
-    )
-
-class Distribution(_Distribution):
-    """Distribution with support for features, tests, and package data
-
-    This is an enhanced version of 'distutils.dist.Distribution' that
-    effectively adds the following new optional keyword arguments to 'setup()':
-
-     'install_requires' -- a string or sequence of strings specifying project
-        versions that the distribution requires when installed, in the format
-        used by 'pkg_resources.require()'.  They will be installed
-        automatically when the package is installed.  If you wish to use
-        packages that are not available in PyPI, or want to give your users an
-        alternate download location, you can add a 'find_links' option to the
-        '[easy_install]' section of your project's 'setup.cfg' file, and then
-        setuptools will scan the listed web pages for links that satisfy the
-        requirements.
-
-     'extras_require' -- a dictionary mapping names of optional "extras" to the
-        additional requirement(s) that using those extras incurs. For example,
-        this::
-
-            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
-
-        indicates that the distribution can optionally provide an extra
-        capability called "reST", but it can only be used if docutils and
-        reSTedit are installed.  If the user installs your package using
-        EasyInstall and requests one of your extras, the corresponding
-        additional requirements will be installed if needed.
-
-     'features' -- a dictionary mapping option names to 'setuptools.Feature'
-        objects.  Features are a portion of the distribution that can be
-        included or excluded based on user options, inter-feature dependencies,
-        and availability on the current system.  Excluded features are omitted
-        from all setup commands, including source and binary distributions, so
-        you can create multiple distributions from the same source tree.
-        Feature names should be valid Python identifiers, except that they may
-        contain the '-' (minus) sign.  Features can be included or excluded
-        via the command line options '--with-X' and '--without-X', where 'X' is
-        the name of the feature.  Whether a feature is included by default, and
-        whether you are allowed to control this from the command line, is
-        determined by the Feature object.  See the 'Feature' class for more
-        information.
-
-     'test_suite' -- the name of a test suite to run for the 'test' command.
-        If the user runs 'python setup.py test', the package will be installed,
-        and the named test suite will be run.  The format is the same as
-        would be used on a 'unittest.py' command line.  That is, it is the
-        dotted name of an object to import and call to generate a test suite.
-
-     'package_data' -- a dictionary mapping package names to lists of filenames
-        or globs to use to find data files contained in the named packages.
-        If the dictionary has filenames or globs listed under '""' (the empty
-        string), those names will be searched for in every package, in addition
-        to any names for the specific package.  Data files found using these
-        names/globs will be installed along with the package, in the same
-        location as the package.  Note that globs are allowed to reference
-        the contents of non-package subdirectories, as long as you use '/' as
-        a path separator.  (Globs are automatically converted to
-        platform-specific paths at runtime.)
-
-    In addition to these new keywords, this class also has several new methods
-    for manipulating the distribution's contents.  For example, the 'include()'
-    and 'exclude()' methods can be thought of as in-place add and subtract
-    commands that add or remove packages, modules, extensions, and so on from
-    the distribution.  They are used by the feature subsystem to configure the
-    distribution for the included and excluded features.
-    """
-
-    _patched_dist = None
-
-    def patch_missing_pkg_info(self, attrs):
-        # Fake up a replacement for the data that would normally come from
-        # PKG-INFO, but which might not yet be built if this is a fresh
-        # checkout.
-        #
-        if not attrs or 'name' not in attrs or 'version' not in attrs:
-            return
-        key = pkg_resources.safe_name(str(attrs['name'])).lower()
-        dist = pkg_resources.working_set.by_key.get(key)
-        if dist is not None and not dist.has_metadata('PKG-INFO'):
-            dist._version = pkg_resources.safe_version(str(attrs['version']))
-            self._patched_dist = dist
-
-    def __init__ (self, attrs=None):
-        have_package_data = hasattr(self, "package_data")
-        if not have_package_data:
-            self.package_data = {}
-        self.require_features = []
-        self.features = {}
-        self.dist_files = []
-        self.src_root = attrs and attrs.pop("src_root", None)
-        self.patch_missing_pkg_info(attrs)
-        # Make sure we have any eggs needed to interpret 'attrs'
-        if attrs is not None:
-            self.dependency_links = attrs.pop('dependency_links', [])
-            assert_string_list(self,'dependency_links',self.dependency_links)
-        if attrs and 'setup_requires' in attrs:
-            self.fetch_build_eggs(attrs.pop('setup_requires'))
-        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
-            if not hasattr(self,ep.name):
-                setattr(self,ep.name,None)
-        _Distribution.__init__(self,attrs)
-        if isinstance(self.metadata.version, (int,long,float)):
-            # Some people apparently take "version number" too literally :)
-            self.metadata.version = str(self.metadata.version)
-
-    def parse_command_line(self):
-        """Process features after parsing command line options"""
-        result = _Distribution.parse_command_line(self)
-        if self.features:
-            self._finalize_features()
-        return result
-
-    def _feature_attrname(self,name):
-        """Convert feature name to corresponding option attribute name"""
-        return 'with_'+name.replace('-','_')
-
-    def fetch_build_eggs(self, requires):
-        """Resolve pre-setup requirements"""
-        from pkg_resources import working_set, parse_requirements
-        for dist in working_set.resolve(
-            parse_requirements(requires), installer=self.fetch_build_egg
-        ):
-            working_set.add(dist)
-
-    def finalize_options(self):
-        _Distribution.finalize_options(self)
-        if self.features:
-            self._set_global_opts_from_features()
-
-        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
-            value = getattr(self,ep.name,None)
-            if value is not None:
-                ep.require(installer=self.fetch_build_egg)
-                ep.load()(self, ep.name, value)
-        if getattr(self, 'convert_2to3_doctests', None):
-            # XXX may convert to set here when we can rely on set being builtin
-            self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
-        else:
-            self.convert_2to3_doctests = []
-
-    def fetch_build_egg(self, req):
-        """Fetch an egg needed for building"""
-
-        try:
-            cmd = self._egg_fetcher
-            cmd.package_index.to_scan = []
-        except AttributeError:
-            from setuptools.command.easy_install import easy_install
-            dist = self.__class__({'script_args':['easy_install']})
-            dist.parse_config_files()
-            opts = dist.get_option_dict('easy_install')
-            keep = (
-                'find_links', 'site_dirs', 'index_url', 'optimize',
-                'allow_hosts'
-            )
-            for key in opts.keys():
-                if key not in keep:
-                    del opts[key]   # don't use any other settings
-            if self.dependency_links:
-                links = self.dependency_links[:]
-                if 'find_links' in opts:
-                    links = opts['find_links'][1].split() + links
-                opts['find_links'] = ('setup', links)
-            cmd = easy_install(
-                dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
-                always_copy=False, build_directory=None, editable=False,
-                upgrade=False, multi_version=True, no_report=True, user=False
-            )
-            cmd.ensure_finalized()
-            self._egg_fetcher = cmd
-        return cmd.easy_install(req)
-
-    def _set_global_opts_from_features(self):
-        """Add --with-X/--without-X options based on optional features"""
-
-        go = []
-        no = self.negative_opt.copy()
-
-        for name,feature in self.features.items():
-            self._set_feature(name,None)
-            feature.validate(self)
-
-            if feature.optional:
-                descr = feature.description
-                incdef = ' (default)'
-                excdef=''
-                if not feature.include_by_default():
-                    excdef, incdef = incdef, excdef
-
-                go.append(('with-'+name, None, 'include '+descr+incdef))
-                go.append(('without-'+name, None, 'exclude '+descr+excdef))
-                no['without-'+name] = 'with-'+name
-
-        self.global_options = self.feature_options = go + self.global_options
-        self.negative_opt = self.feature_negopt = no
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def _finalize_features(self):
-        """Add/remove features and resolve dependencies between them"""
-
-        # First, flag all the enabled items (and thus their dependencies)
-        for name,feature in self.features.items():
-            enabled = self.feature_is_included(name)
-            if enabled or (enabled is None and feature.include_by_default()):
-                feature.include_in(self)
-                self._set_feature(name,1)
-
-        # Then disable the rest, so that off-by-default features don't
-        # get flagged as errors when they're required by an enabled feature
-        for name,feature in self.features.items():
-            if not self.feature_is_included(name):
-                feature.exclude_from(self)
-                self._set_feature(name,0)
-
-
-    def get_command_class(self, command):
-        """Pluggable version of get_command_class()"""
-        if command in self.cmdclass:
-            return self.cmdclass[command]
-
-        for ep in pkg_resources.iter_entry_points('distutils.commands',command):
-            ep.require(installer=self.fetch_build_egg)
-            self.cmdclass[command] = cmdclass = ep.load()
-            return cmdclass
-        else:
-            return _Distribution.get_command_class(self, command)
-
-    def print_commands(self):
-        for ep in pkg_resources.iter_entry_points('distutils.commands'):
-            if ep.name not in self.cmdclass:
-                cmdclass = ep.load(False) # don't require extras, we're not running
-                self.cmdclass[ep.name] = cmdclass
-        return _Distribution.print_commands(self)
-
-
-
-
-
-    def _set_feature(self,name,status):
-        """Set feature's inclusion status"""
-        setattr(self,self._feature_attrname(name),status)
-
-    def feature_is_included(self,name):
-        """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
-        return getattr(self,self._feature_attrname(name))
-
-    def include_feature(self,name):
-        """Request inclusion of feature named 'name'"""
-
-        if self.feature_is_included(name)==0:
-            descr = self.features[name].description
-            raise DistutilsOptionError(
-               descr + " is required, but was excluded or is not available"
-           )
-        self.features[name].include_in(self)
-        self._set_feature(name,1)
-
-    def include(self,**attrs):
-        """Add items to distribution that are named in keyword arguments
-
-        For example, 'dist.include(py_modules=["x"])' would add 'x' to
-        the distribution's 'py_modules' attribute, if it was not already
-        there.
-
-        Currently, this method only supports inclusion for attributes that are
-        lists or tuples.  If you need to add support for adding to other
-        attributes in this or a subclass, you can add an '_include_X' method,
-        where 'X' is the name of the attribute.  The method will be called with
-        the value passed to 'include()'.  So, 'dist.include(foo={"bar":"baz"})'
-        will try to call 'dist._include_foo({"bar":"baz"})', which can then
-        handle whatever special inclusion logic is needed.
-        """
-        for k,v in attrs.items():
-            include = getattr(self, '_include_'+k, None)
-            if include:
-                include(v)
-            else:
-                self._include_misc(k,v)
-
-    def exclude_package(self,package):
-        """Remove packages, modules, and extensions in named package"""
-
-        pfx = package+'.'
-        if self.packages:
-            self.packages = [
-                p for p in self.packages
-                    if p != package and not p.startswith(pfx)
-            ]
-
-        if self.py_modules:
-            self.py_modules = [
-                p for p in self.py_modules
-                    if p != package and not p.startswith(pfx)
-            ]
-
-        if self.ext_modules:
-            self.ext_modules = [
-                p for p in self.ext_modules
-                    if p.name != package and not p.name.startswith(pfx)
-            ]
-
-
-    def has_contents_for(self,package):
-        """Return true if 'exclude_package(package)' would do something"""
-
-        pfx = package+'.'
-
-        for p in self.iter_distribution_names():
-            if p==package or p.startswith(pfx):
-                return True
-
-
-
-
-
-
-
-
-
-
-    def _exclude_misc(self,name,value):
-        """Handle 'exclude()' for list/tuple attrs without a special handler"""
-        if not isinstance(value,sequence):
-            raise DistutilsSetupError(
-                "%s: setting must be a list or tuple (%r)" % (name, value)
-            )
-        try:
-            old = getattr(self,name)
-        except AttributeError:
-            raise DistutilsSetupError(
-                "%s: No such distribution setting" % name
-            )
-        if old is not None and not isinstance(old,sequence):
-            raise DistutilsSetupError(
-                name+": this setting cannot be changed via include/exclude"
-            )
-        elif old:
-            setattr(self,name,[item for item in old if item not in value])
-
-    def _include_misc(self,name,value):
-        """Handle 'include()' for list/tuple attrs without a special handler"""
-
-        if not isinstance(value,sequence):
-            raise DistutilsSetupError(
-                "%s: setting must be a list (%r)" % (name, value)
-            )
-        try:
-            old = getattr(self,name)
-        except AttributeError:
-            raise DistutilsSetupError(
-                "%s: No such distribution setting" % name
-            )
-        if old is None:
-            setattr(self,name,value)
-        elif not isinstance(old,sequence):
-            raise DistutilsSetupError(
-                name+": this setting cannot be changed via include/exclude"
-            )
-        else:
-            setattr(self,name,old+[item for item in value if item not in old])
-
-    def exclude(self,**attrs):
-        """Remove items from distribution that are named in keyword arguments
-
-        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
-        the distribution's 'py_modules' attribute.  Excluding packages uses
-        the 'exclude_package()' method, so all of the package's contained
-        packages, modules, and extensions are also excluded.
-
-        Currently, this method only supports exclusion from attributes that are
-        lists or tuples.  If you need to add support for excluding from other
-        attributes in this or a subclass, you can add an '_exclude_X' method,
-        where 'X' is the name of the attribute.  The method will be called with
-        the value passed to 'exclude()'.  So, 'dist.exclude(foo={"bar":"baz"})'
-        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
-        handle whatever special exclusion logic is needed.
-        """
-        for k,v in attrs.items():
-            exclude = getattr(self, '_exclude_'+k, None)
-            if exclude:
-                exclude(v)
-            else:
-                self._exclude_misc(k,v)
-
-    def _exclude_packages(self,packages):
-        if not isinstance(packages,sequence):
-            raise DistutilsSetupError(
-                "packages: setting must be a list or tuple (%r)" % (packages,)
-            )
-        map(self.exclude_package, packages)
-
-
-
-
-
-
-
-
-
-
-
-
-    def _parse_command_opts(self, parser, args):
-        # Remove --with-X/--without-X options when processing command args
-        self.global_options = self.__class__.global_options
-        self.negative_opt = self.__class__.negative_opt
-
-        # First, expand any aliases
-        command = args[0]
-        aliases = self.get_option_dict('aliases')
-        while command in aliases:
-            src,alias = aliases[command]
-            del aliases[command]    # ensure each alias can expand only once!
-            import shlex
-            args[:1] = shlex.split(alias,True)
-            command = args[0]
-
-        nargs = _Distribution._parse_command_opts(self, parser, args)
-
-        # Handle commands that want to consume all remaining arguments
-        cmd_class = self.get_command_class(command)
-        if getattr(cmd_class,'command_consumes_arguments',None):
-            self.get_option_dict(command)['args'] = ("command line", nargs)
-            if nargs is not None:
-                return []
-
-        return nargs
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def get_cmdline_options(self):
-        """Return a '{cmd: {opt:val}}' map of all command-line options
-
-        Option names are all long, but do not include the leading '--', and
-        contain dashes rather than underscores.  If the option doesn't take
-        an argument (e.g. '--quiet'), the 'val' is 'None'.
-
-        Note that options provided by config files are intentionally excluded.
-        """
-
-        d = {}
-
-        for cmd,opts in self.command_options.items():
-
-            for opt,(src,val) in opts.items():
-
-                if src != "command line":
-                    continue
-
-                opt = opt.replace('_','-')
-
-                if val==0:
-                    cmdobj = self.get_command_obj(cmd)
-                    neg_opt = self.negative_opt.copy()
-                    neg_opt.update(getattr(cmdobj,'negative_opt',{}))
-                    for neg,pos in neg_opt.items():
-                        if pos==opt:
-                            opt=neg
-                            val=None
-                            break
-                    else:
-                        raise AssertionError("Shouldn't be able to get here")
-
-                elif val==1:
-                    val = None
-
-                d.setdefault(cmd,{})[opt] = val
-
-        return d
-
-
-    def iter_distribution_names(self):
-        """Yield all packages, modules, and extension names in distribution"""
-
-        for pkg in self.packages or ():
-            yield pkg
-
-        for module in self.py_modules or ():
-            yield module
-
-        for ext in self.ext_modules or ():
-            if isinstance(ext,tuple):
-                name, buildinfo = ext
-            else:
-                name = ext.name
-            if name.endswith('module'):
-                name = name[:-6]
-            yield name
-
-
-    def handle_display_options(self, option_order):
-        """If there were any non-global "display-only" options
-        (--help-commands or the metadata display options) on the command
-        line, display the requested info and return true; else return
-        false.
-        """
-        import sys
-
-        if sys.version_info < (3,) or self.help_commands:
-            return _Distribution.handle_display_options(self, option_order)
-
-        # Stdout may be StringIO (e.g. in tests)
-        import io
-        if not isinstance(sys.stdout, io.TextIOWrapper):
-            return _Distribution.handle_display_options(self, option_order)
-
-        # Don't wrap stdout if utf-8 is already the encoding. Provides
-        #  workaround for #334.
-        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
-            return _Distribution.handle_display_options(self, option_order)
-
-        # Print metadata in UTF-8 no matter the platform
-        encoding = sys.stdout.encoding
-        errors = sys.stdout.errors
-        newline = sys.platform != 'win32' and '\n' or None
-        line_buffering = sys.stdout.line_buffering
-
-        sys.stdout = io.TextIOWrapper(
-            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
-        try:
-            return _Distribution.handle_display_options(self, option_order)
-        finally:
-            sys.stdout = io.TextIOWrapper(
-                sys.stdout.detach(), encoding, errors, newline, line_buffering)
-
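
# A simplified, Python 3-only sketch of the stdout re-wrapping technique used
# above (the original code also restores the previous wrapper afterwards):
import io
import sys

if isinstance(sys.stdout, io.TextIOWrapper):
    sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding='utf-8')
    print(u'\u00e9')   # emitted as UTF-8 regardless of the platform default
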
-
-# Install it throughout the distutils
-for module in distutils.dist, distutils.core, distutils.cmd:
-    module.Distribution = Distribution
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class Feature:
-    """A subset of the distribution that can be excluded if unneeded/wanted
-
-    Features are created using these keyword arguments:
-
-      'description' -- a short, human readable description of the feature, to
-         be used in error messages, and option help messages.
-
-      'standard' -- if true, the feature is included by default if it is
-         available on the current system.  Otherwise, the feature is only
-         included if requested via a command line '--with-X' option, or if
-         another included feature requires it.  The default setting is 'False'.
-
-      'available' -- if true, the feature is available for installation on the
-         current system.  The default setting is 'True'.
-
-      'optional' -- if true, the feature's inclusion can be controlled from the
-         command line, using the '--with-X' or '--without-X' options.  If
-         false, the feature's inclusion status is determined automatically,
-         based on 'available', 'standard', and whether any other feature
-         requires it.  The default setting is 'True'.
-
-      'require_features' -- a string or sequence of strings naming features
-         that should also be included if this feature is included.  Defaults to
-         empty list.  May also contain 'Require' objects that should be
-         added/removed from the distribution.
-
-      'remove' -- a string or list of strings naming packages to be removed
-         from the distribution if this feature is *not* included.  If the
-         feature *is* included, this argument is ignored.  This argument exists
-         to support removing features that "crosscut" a distribution, such as
-         defining a 'tests' feature that removes all the 'tests' subpackages
-         provided by other features.  The default for this argument is an empty
-         list.  (Note: the named package(s) or modules must exist in the base
-         distribution when the 'setup()' function is initially called.)
-
-      other keywords -- any other keyword arguments are saved, and passed to
-         the distribution's 'include()' and 'exclude()' methods when the
-         feature is included or excluded, respectively.  So, for example, you
-         could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
-         added or removed from the distribution as appropriate.
-
-    A feature must include at least one 'require_features', 'remove', or other
-    keyword argument.  Otherwise, it can't affect the distribution in any way.
-    Note also that you can subclass 'Feature' to create your own specialized
-    feature types that modify the distribution in other ways when included or
-    excluded.  See the docstrings for the various methods here for more detail.
-    Aside from the methods, the only feature attributes that distributions look
-    at are 'description' and 'optional'.
-    """
-    def __init__(self, description, standard=False, available=True,
-        optional=True, require_features=(), remove=(), **extras
-    ):
-
-        self.description = description
-        self.standard = standard
-        self.available = available
-        self.optional = optional
-        if isinstance(require_features,(str,Require)):
-            require_features = require_features,
-
-        self.require_features = [
-            r for r in require_features if isinstance(r,str)
-        ]
-        er = [r for r in require_features if not isinstance(r,str)]
-        if er: extras['require_features'] = er
-
-        if isinstance(remove,str):
-            remove = remove,
-        self.remove = remove
-        self.extras = extras
-
-        if not remove and not require_features and not extras:
-            raise DistutilsSetupError(
-                "Feature %s: must define 'require_features', 'remove', or at least one"
-                " of 'packages', 'py_modules', etc."
-            )
-
-    def include_by_default(self):
-        """Should this feature be included by default?"""
-        return self.available and self.standard
-
-    def include_in(self,dist):
-
-        """Ensure feature and its requirements are included in distribution
-
-        You may override this in a subclass to perform additional operations on
-        the distribution.  Note that this method may be called more than once
-        per feature, and so should be idempotent.
-
-        """
-
-        if not self.available:
-            raise DistutilsPlatformError(
-                self.description+" is required,"
-                "but is not available on this platform"
-            )
-
-        dist.include(**self.extras)
-
-        for f in self.require_features:
-            dist.include_feature(f)
-
-
-
-    def exclude_from(self,dist):
-
-        """Ensure feature is excluded from distribution
-
-        You may override this in a subclass to perform additional operations on
-        the distribution.  This method will be called at most once per
-        feature, and only after all included features have been asked to
-        include themselves.
-        """
-
-        dist.exclude(**self.extras)
-
-        if self.remove:
-            for item in self.remove:
-                dist.exclude_package(item)
-
-
-
-    def validate(self,dist):
-
-        """Verify that feature makes sense in context of distribution
-
-        This method is called by the distribution just before it parses its
-        command line.  It checks to ensure that the 'remove' attribute, if any,
-        contains only valid package/module names that are present in the base
-        distribution when 'setup()' is called.  You may override it in a
-        subclass to perform any other required validation of the feature
-        against a target distribution.
-        """
-
-        for item in self.remove:
-            if not dist.has_contents_for(item):
-                raise DistutilsSetupError(
-                    "%s wants to be able to remove %s, but the distribution"
-                    " doesn't contain any packages or modules under %s"
-                    % (self.description, item, item)
-                )
-
-
-
-def check_packages(dist, attr, value):
-    for pkgname in value:
-        if not re.match(r'\w+(\.\w+)*', pkgname):
-            distutils.log.warn(
-                "WARNING: %r not a valid package name; please use only"
-                ".-separated package names in setup.py", pkgname
-            )
-
diff --git a/vendor/distribute-0.6.34/setuptools/extension.py b/vendor/distribute-0.6.34/setuptools/extension.py
deleted file mode 100644
index eb8b836cc316ffaccfd64cef48a50e36b5a674e9..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/extension.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import sys
-import distutils.core
-import distutils.extension
-
-from setuptools.dist import _get_unpatched
-
-_Extension = _get_unpatched(distutils.core.Extension)
-
-def have_pyrex():
-    """
-    Return True if Cython or Pyrex can be imported.
-    """
-    pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext'
-    for pyrex_impl in pyrex_impls:
-        try:
-            # from (pyrex_impl) import build_ext
-            __import__(pyrex_impl, fromlist=['build_ext']).build_ext
-            return True
-        except Exception:
-            pass
-    return False
-
-
-class Extension(_Extension):
-    """Extension that uses '.c' files in place of '.pyx' files"""
-
-    def __init__(self, *args, **kw):
-        _Extension.__init__(self, *args, **kw)
-        if not have_pyrex():
-            self._convert_pyx_sources_to_c()
-
-    def _convert_pyx_sources_to_c(self):
-        "convert .pyx extensions to .c"
-        def pyx_to_c(source):
-            if source.endswith('.pyx'):
-                source = source[:-4] + '.c'
-            return source
-        self.sources = map(pyx_to_c, self.sources)
-
-class Library(Extension):
-    """Just like a regular Extension, but built as a library instead"""
-
-distutils.core.Extension = Extension
-distutils.extension.Extension = Extension
-if 'distutils.command.build_ext' in sys.modules:
-    sys.modules['distutils.command.build_ext'].Extension = Extension
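
# Illustrative: when neither Cython nor Pyrex is importable, the Extension
# subclass above substitutes pre-generated .c files for .pyx sources; the
# 'example' names are placeholders.
from setuptools.extension import Extension

ext = Extension('example.fast', ['example/fast.pyx'])
print(ext.sources)   # ['example/fast.c'] when have_pyrex() is False
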
diff --git a/vendor/distribute-0.6.34/setuptools/gui-32.exe b/vendor/distribute-0.6.34/setuptools/gui-32.exe
deleted file mode 100755
index 3f64af7de42fd6597b4c6cf50896d32a98a7d6a2..0000000000000000000000000000000000000000
Binary files a/vendor/distribute-0.6.34/setuptools/gui-32.exe and /dev/null differ
diff --git a/vendor/distribute-0.6.34/setuptools/gui-64.exe b/vendor/distribute-0.6.34/setuptools/gui-64.exe
deleted file mode 100755
index 3ab4378e1d401d198b92a33ce249d29bcdb26a63..0000000000000000000000000000000000000000
Binary files a/vendor/distribute-0.6.34/setuptools/gui-64.exe and /dev/null differ
diff --git a/vendor/distribute-0.6.34/setuptools/gui.exe b/vendor/distribute-0.6.34/setuptools/gui.exe
deleted file mode 100755
index 3f64af7de42fd6597b4c6cf50896d32a98a7d6a2..0000000000000000000000000000000000000000
Binary files a/vendor/distribute-0.6.34/setuptools/gui.exe and /dev/null differ
diff --git a/vendor/distribute-0.6.34/setuptools/package_index.py b/vendor/distribute-0.6.34/setuptools/package_index.py
deleted file mode 100644
index 0ee21e3b7bbbd045f79dc382a09557a42ca17cb7..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/package_index.py
+++ /dev/null
@@ -1,920 +0,0 @@
-"""PyPI and direct package downloading"""
-import sys, os.path, re, urlparse, urllib, urllib2, shutil, random, socket, cStringIO
-import base64
-import httplib
-from pkg_resources import *
-from distutils import log
-from distutils.errors import DistutilsError
-try:
-    from hashlib import md5
-except ImportError:
-    from md5 import md5
-from fnmatch import translate
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
-HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
-# this is here to fix emacs' cruddy broken syntax highlighting
-PYPI_MD5 = re.compile(
-    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
-    'href="[^?]+\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
-)
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
-    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
-    'interpret_distro_name',
-]
-
-_SOCKET_TIMEOUT = 15
-
-def parse_bdist_wininst(name):
-    """Return (base,pyversion) or (None,None) for possible .exe name"""
-
-    lower = name.lower()
-    base, py_ver, plat = None, None, None
-
-    if lower.endswith('.exe'):
-        if lower.endswith('.win32.exe'):
-            base = name[:-10]
-            plat = 'win32'
-        elif lower.startswith('.win32-py',-16):
-            py_ver = name[-7:-4]
-            base = name[:-16]
-            plat = 'win32'
-        elif lower.endswith('.win-amd64.exe'):
-            base = name[:-14]
-            plat = 'win-amd64'
-        elif lower.startswith('.win-amd64-py',-20):
-            py_ver = name[-7:-4]
-            base = name[:-20]
-            plat = 'win-amd64'
-    return base,py_ver,plat
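
# Quick checks of the filename parsing above; 'foo' is a placeholder project.
from setuptools.package_index import parse_bdist_wininst

print(parse_bdist_wininst('foo-1.0.win32.exe'))        # ('foo-1.0', None, 'win32')
print(parse_bdist_wininst('foo-1.0.win32-py2.5.exe'))  # ('foo-1.0', '2.5', 'win32')
print(parse_bdist_wininst('foo-1.0.tar.gz'))           # (None, None, None)
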
-
-
-def egg_info_for_url(url):
-    scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
-    base = urllib2.unquote(path.split('/')[-1])
-    if '#' in base: base, fragment = base.split('#',1)
-    return base,fragment
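
# Example of the splitting above; the URL is hypothetical.
from setuptools.package_index import egg_info_for_url

print(egg_info_for_url('http://example.com/dists/Foo-1.0.tar.gz#egg=Foo-1.0'))
# ('Foo-1.0.tar.gz', 'egg=Foo-1.0')
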
-
-def distros_for_url(url, metadata=None):
-    """Yield egg or source distribution objects that might be found at a URL"""
-    base, fragment = egg_info_for_url(url)
-    for dist in distros_for_location(url, base, metadata): yield dist
-    if fragment:
-        match = EGG_FRAGMENT.match(fragment)
-        if match:
-            for dist in interpret_distro_name(
-                url, match.group(1), metadata, precedence = CHECKOUT_DIST
-            ):
-                yield dist
-
-def distros_for_location(location, basename, metadata=None):
-    """Yield egg or source distribution objects based on basename"""
-    if basename.endswith('.egg.zip'):
-        basename = basename[:-4]    # strip the .zip
-    if basename.endswith('.egg') and '-' in basename:
-        # only one, unambiguous interpretation
-        return [Distribution.from_location(location, basename, metadata)]
-
-    if basename.endswith('.exe'):
-        win_base, py_ver, platform = parse_bdist_wininst(basename)
-        if win_base is not None:
-            return interpret_distro_name(
-                location, win_base, metadata, py_ver, BINARY_DIST, platform
-            )
-
-    # Try source distro extensions (.zip, .tgz, etc.)
-    #
-    for ext in EXTENSIONS:
-        if basename.endswith(ext):
-            basename = basename[:-len(ext)]
-            return interpret_distro_name(location, basename, metadata)
-    return []  # no extension matched
-
-def distros_for_filename(filename, metadata=None):
-    """Yield possible egg or source distribution objects based on a filename"""
-    return distros_for_location(
-        normalize_path(filename), os.path.basename(filename), metadata
-    )
-
-
-def interpret_distro_name(location, basename, metadata,
-    py_version=None, precedence=SOURCE_DIST, platform=None
-):
-    """Generate alternative interpretations of a source distro name
-
-    Note: if `location` is a filesystem filename, you should call
-    ``pkg_resources.normalize_path()`` on it before passing it to this
-    routine!
-    """
-    # Generate alternative interpretations of a source distro name
-    # Because some packages are ambiguous as to name/versions split
-    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
-    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
-    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version").  In practice,
-    # the spurious interpretations should be ignored, because in the event
-    # there's also an "adns" package, the spurious "python-1.1.0" version will
-    # compare lower than any numeric version number, and is therefore unlikely
-    # to match a request for it.  It's still a potential problem, though, and
-    # in the long run PyPI and the distutils should go for "safe" names and
-    # versions in distribution archive names (sdist and bdist).
-
-    parts = basename.split('-')
-    if not py_version:
-        for i,p in enumerate(parts[2:]):
-            if len(p)==5 and p.startswith('py2.'):
-                return # It's a bdist_dumb, not an sdist -- bail out
-
-    for p in range(1,len(parts)+1):
-        yield Distribution(
-            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
-            py_version=py_version, precedence = precedence,
-            platform = platform
-        )
-
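# A sketch of the alternative name/version splits the generator above produces
# for an ambiguous archive name such as 'adns-python-1.1.0':
parts = 'adns-python-1.1.0'.split('-')
for p in range(1, len(parts) + 1):
    print('%s  |  %s' % ('-'.join(parts[:p]), '-'.join(parts[p:])))
# adns  |  python-1.1.0
# adns-python  |  1.1.0
# adns-python-1.1.0  |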
-REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
-# this line is here to fix emacs' cruddy broken syntax highlighting
-
-def find_external_links(url, page):
-    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
-
-    for match in REL.finditer(page):
-        tag, rel = match.groups()
-        rels = map(str.strip, rel.lower().split(','))
-        if 'homepage' in rels or 'download' in rels:
-            for match in HREF.finditer(tag):
-                yield urlparse.urljoin(url, htmldecode(match.group(1)))
-
-    for tag in ("<th>Home Page", "<th>Download URL"):
-        pos = page.find(tag)
-        if pos!=-1:
-            match = HREF.search(page,pos)
-            if match:
-                yield urlparse.urljoin(url, htmldecode(match.group(1)))
-
-user_agent = "Python-urllib/%s distribute/%s" % (
-    sys.version[:3], require('distribute')[0].version
-)
-
-
-class PackageIndex(Environment):
-    """A distribution index that scans web pages for download URLs"""
-
-    def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',),
-        *args, **kw
-    ):
-        Environment.__init__(self,*args,**kw)
-        self.index_url = index_url + "/"[:not index_url.endswith('/')]
-        self.scanned_urls = {}
-        self.fetched_urls = {}
-        self.package_pages = {}
-        self.allows = re.compile('|'.join(map(translate,hosts))).match
-        self.to_scan = []
-
-
-
-    def process_url(self, url, retrieve=False):
-        """Evaluate a URL as a possible download, and maybe retrieve it"""
-        if url in self.scanned_urls and not retrieve:
-            return
-        self.scanned_urls[url] = True
-        if not URL_SCHEME(url):
-            self.process_filename(url)
-            return
-        else:
-            dists = list(distros_for_url(url))
-            if dists:
-                if not self.url_ok(url):
-                    return
-                self.debug("Found link: %s", url)
-
-        if dists or not retrieve or url in self.fetched_urls:
-            map(self.add, dists)
-            return  # don't need the actual page
-
-        if not self.url_ok(url):
-            self.fetched_urls[url] = True
-            return
-
-        self.info("Reading %s", url)
-        f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url)
-        if f is None: return
-        self.fetched_urls[url] = self.fetched_urls[f.url] = True
-
-        if 'html' not in f.headers.get('content-type', '').lower():
-            f.close()   # not html, we can't process it
-            return
-
-        base = f.url     # handle redirects
-        page = f.read()
-        if not isinstance(page, str): # We are in Python 3 and got bytes. We want str.
-            if isinstance(f, urllib2.HTTPError):
-                # Errors have no charset, assume latin1:
-                charset = 'latin-1'
-            else:
-                charset = f.headers.get_param('charset') or 'latin-1'
-            page = page.decode(charset, "ignore")
-        f.close()
-        for match in HREF.finditer(page):
-            link = urlparse.urljoin(base, htmldecode(match.group(1)))
-            self.process_url(link)
-        if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
-            page = self.process_index(url, page)
-
-    def process_filename(self, fn, nested=False):
-        # process filenames or directories
-        if not os.path.exists(fn):
-            self.warn("Not found: %s", fn)
-            return
-
-        if os.path.isdir(fn) and not nested:
-            path = os.path.realpath(fn)
-            for item in os.listdir(path):
-                self.process_filename(os.path.join(path,item), True)
-
-        dists = distros_for_filename(fn)
-        if dists:
-            self.debug("Found: %s", fn)
-            map(self.add, dists)
-
-    def url_ok(self, url, fatal=False):
-        s = URL_SCHEME(url)
-        if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]):
-            return True
-        msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
-        if fatal:
-            raise DistutilsError(msg % url)
-        else:
-            self.warn(msg, url)
-
-    def scan_egg_links(self, search_path):
-        for item in search_path:
-            if os.path.isdir(item):
-                for entry in os.listdir(item):
-                    if entry.endswith('.egg-link'):
-                        self.scan_egg_link(item, entry)
-
-    def scan_egg_link(self, path, entry):
-        lines = filter(None, map(str.strip, open(os.path.join(path, entry))))
-        if len(lines)==2:
-            for dist in find_distributions(os.path.join(path, lines[0])):
-                dist.location = os.path.join(path, *lines)
-                dist.precedence = SOURCE_DIST
-                self.add(dist)
-
-    def process_index(self,url,page):
-        """Process the contents of a PyPI page"""
-        def scan(link):
-            # Process a URL to see if it's for a package page
-            if link.startswith(self.index_url):
-                parts = map(
-                    urllib2.unquote, link[len(self.index_url):].split('/')
-                )
-                if len(parts)==2 and '#' not in parts[1]:
-                    # it's a package page, sanitize and index it
-                    pkg = safe_name(parts[0])
-                    ver = safe_version(parts[1])
-                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
-                    return to_filename(pkg), to_filename(ver)
-            return None, None
-
-        # process an index page into the package-page index
-        for match in HREF.finditer(page):
-            try:
-                scan( urlparse.urljoin(url, htmldecode(match.group(1))) )
-            except ValueError:
-                pass
-
-        pkg, ver = scan(url)   # ensure this page is in the page index
-        if pkg:
-            # process individual package page
-            for new_url in find_external_links(url, page):
-                # Process the found URL
-                base, frag = egg_info_for_url(new_url)
-                if base.endswith('.py') and not frag:
-                    if ver:
-                        new_url+='#egg=%s-%s' % (pkg,ver)
-                    else:
-                        self.need_version_info(url)
-                self.scan_url(new_url)
-
-            return PYPI_MD5.sub(
-                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
-            )
-        else:
-            return ""   # no sense double-scanning non-package pages
-
-
-
-    def need_version_info(self, url):
-        self.scan_all(
-            "Page at %s links to .py file(s) without version info; an index "
-            "scan is required.", url
-        )
-
-    def scan_all(self, msg=None, *args):
-        if self.index_url not in self.fetched_urls:
-            if msg: self.warn(msg,*args)
-            self.info(
-                "Scanning index of all packages (this may take a while)"
-            )
-        self.scan_url(self.index_url)
-
-    def find_packages(self, requirement):
-        self.scan_url(self.index_url + requirement.unsafe_name+'/')
-
-        if not self.package_pages.get(requirement.key):
-            # Fall back to safe version of the name
-            self.scan_url(self.index_url + requirement.project_name+'/')
-
-        if not self.package_pages.get(requirement.key):
-            # We couldn't find the target package, so search the index page too
-            self.not_found_in_index(requirement)
-
-        for url in list(self.package_pages.get(requirement.key,())):
-            # scan each page that might be related to the desired package
-            self.scan_url(url)
-
-    def obtain(self, requirement, installer=None):
-        self.prescan(); self.find_packages(requirement)
-        for dist in self[requirement.key]:
-            if dist in requirement:
-                return dist
-            self.debug("%s does not match %s", requirement, dist)
-        return super(PackageIndex, self).obtain(requirement,installer)
-
-
-
-
-
-    def check_md5(self, cs, info, filename, tfp):
-        if re.match('md5=[0-9a-f]{32}$', info):
-            self.debug("Validating md5 checksum for %s", filename)
-            if cs.hexdigest()<>info[4:]:
-                tfp.close()
-                os.unlink(filename)
-                raise DistutilsError(
-                    "MD5 validation failed for "+os.path.basename(filename)+
-                    "; possible download problem?"
-                )
-
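# Illustrative sketch (not from the original module): check_md5 above validates
# an "#md5=<32 hex digits>" URL fragment against the hash of the downloaded
# bytes.  A standalone equivalent using hashlib; the payload is hypothetical.
import re
from hashlib import md5

def fragment_matches(data, fragment):
    """True if `fragment` carries no md5 spec, or if its digest matches `data`."""
    if not re.match('md5=[0-9a-f]{32}$', fragment):
        return True                      # nothing to verify
    return md5(data).hexdigest() == fragment[4:]

# fragment_matches(b'payload', 'md5=' + md5(b'payload').hexdigest())  -> True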
-    def add_find_links(self, urls):
-        """Add `urls` to the list that will be prescanned for searches"""
-        for url in urls:
-            if (
-                self.to_scan is None        # if we have already "gone online"
-                or not URL_SCHEME(url)      # or it's a local file/directory
-                or url.startswith('file:')
-                or list(distros_for_url(url))   # or a direct package link
-            ):
-                # then go ahead and process it now
-                self.scan_url(url)
-            else:
-                # otherwise, defer retrieval till later
-                self.to_scan.append(url)
-
-    def prescan(self):
-        """Scan urls scheduled for prescanning (e.g. --find-links)"""
-        if self.to_scan:
-            map(self.scan_url, self.to_scan)
-        self.to_scan = None     # from now on, go ahead and process immediately
-
-    def not_found_in_index(self, requirement):
-        if self[requirement.key]:   # we've seen at least one distro
-            meth, msg = self.info, "Couldn't retrieve index page for %r"
-        else:   # no distros seen for this name, might be misspelled
-            meth, msg = (self.warn,
-                "Couldn't find index page for %r (maybe misspelled?)")
-        meth(msg, requirement.unsafe_name)
-        self.scan_all()
-
-    def download(self, spec, tmpdir):
-        """Locate and/or download `spec` to `tmpdir`, returning a local path
-
-        `spec` may be a ``Requirement`` object, or a string containing a URL,
-        an existing local filename, or a project/version requirement spec
-        (i.e. the string form of a ``Requirement`` object).  If it is the URL
-        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
-        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
-        automatically created alongside the downloaded file.
-
-        If `spec` is a ``Requirement`` object or a string containing a
-        project/version requirement spec, this method returns the location of
-        a matching distribution (possibly after downloading it to `tmpdir`).
-        If `spec` is a locally existing file or directory name, it is simply
-        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
-        of `tmpdir`, and the local filename is returned.  Various errors may be
-        raised if a problem occurs during downloading.
-        """
-        if not isinstance(spec,Requirement):
-            scheme = URL_SCHEME(spec)
-            if scheme:
-                # It's a url, download it to tmpdir
-                found = self._download_url(scheme.group(1), spec, tmpdir)
-                base, fragment = egg_info_for_url(spec)
-                if base.endswith('.py'):
-                    found = self.gen_setup(found,fragment,tmpdir)
-                return found
-            elif os.path.exists(spec):
-                # Existing file or directory, just return it
-                return spec
-            else:
-                try:
-                    spec = Requirement.parse(spec)
-                except ValueError:
-                    raise DistutilsError(
-                        "Not a URL, existing file, or requirement spec: %r" %
-                        (spec,)
-                    )
-        return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
-
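# Illustrative usage sketch (not from the original module): the kinds of `spec`
# accepted by download(), per the docstring above.  Assumes a Python 2
# environment with this distribute release importable and network access; the
# project name, URL and paths are hypothetical.
from setuptools.package_index import PackageIndex
import tempfile

def download_examples():
    index = PackageIndex()
    tmpdir = tempfile.mkdtemp()
    from_url = index.download('http://example.com/dist/Example-1.0.tar.gz', tmpdir)
    from_spec = index.download('Example>=1.0', tmpdir)      # best matching dist
    unchanged = index.download('/existing/Example-1.0.tar.gz', tmpdir)  # returned as-is
    return from_url, from_spec, unchanged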
-
-    def fetch_distribution(self,
-        requirement, tmpdir, force_scan=False, source=False, develop_ok=False,
-        local_index=None
-    ):
-        """Obtain a distribution suitable for fulfilling `requirement`
-
-        `requirement` must be a ``pkg_resources.Requirement`` instance.
-        If necessary, or if the `force_scan` flag is set, the requirement is
-        searched for in the (online) package index as well as the locally
-        installed packages.  If a distribution matching `requirement` is found,
-        the returned distribution's ``location`` is the value you would have
-        gotten from calling the ``download()`` method with the matching
-        distribution's URL or filename.  If no matching distribution is found,
-        ``None`` is returned.
-
-        If the `source` flag is set, only source distributions and source
-        checkout links will be considered.  Unless the `develop_ok` flag is
-        set, development and system eggs (i.e., those using the ``.egg-info``
-        format) will be ignored.
-        """
-
-        # process a Requirement
-        self.info("Searching for %s", requirement)
-        skipped = {}
-        dist = None
-
-        def find(req, env=None):
-            if env is None:
-                env = self
-            # Find a matching distribution; may be called more than once
-
-            for dist in env[req.key]:
-
-                if dist.precedence==DEVELOP_DIST and not develop_ok:
-                    if dist not in skipped:
-                        self.warn("Skipping development or system egg: %s",dist)
-                        skipped[dist] = 1
-                    continue
-
-                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
-                    self.info("Best match: %s", dist)
-                    return dist.clone(
-                        location=self.download(dist.location, tmpdir)
-                    )
-
-        if force_scan:
-            self.prescan()
-            self.find_packages(requirement)
-            dist = find(requirement)
-
-        if local_index is not None:
-            dist = dist or find(requirement, local_index)
-
-        if dist is None and self.to_scan is not None:
-            self.prescan()
-            dist = find(requirement)
-
-        if dist is None and not force_scan:
-            self.find_packages(requirement)
-            dist = find(requirement)
-
-        if dist is None:
-            self.warn(
-                "No local packages or download links found for %s%s",
-                (source and "a source distribution of " or ""),
-                requirement,
-            )
-        return dist
-
-    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
-        """Obtain a file suitable for fulfilling `requirement`
-
-        DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
-        backward compatibility, this routine is identical but returns the
-        ``location`` of the downloaded distribution instead of a distribution
-        object.
-        """
-        dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
-        if dist is not None:
-            return dist.location
-        return None
-
-
-
-
-
-
-
-
-    def gen_setup(self, filename, fragment, tmpdir):
-        match = EGG_FRAGMENT.match(fragment)
-        dists = match and [d for d in
-            interpret_distro_name(filename, match.group(1), None) if d.version
-        ] or []
-
-        if len(dists)==1:   # unambiguous ``#egg`` fragment
-            basename = os.path.basename(filename)
-
-            # Make sure the file has been downloaded to the temp dir.
-            if os.path.dirname(filename) != tmpdir:
-                dst = os.path.join(tmpdir, basename)
-                from setuptools.command.easy_install import samefile
-                if not samefile(filename, dst):
-                    shutil.copy2(filename, dst)
-                    filename=dst
-
-            file = open(os.path.join(tmpdir, 'setup.py'), 'w')
-            file.write(
-                "from setuptools import setup\n"
-                "setup(name=%r, version=%r, py_modules=[%r])\n"
-                % (
-                    dists[0].project_name, dists[0].version,
-                    os.path.splitext(basename)[0]
-                )
-            )
-            file.close()
-            return filename
-
-        elif match:
-            raise DistutilsError(
-                "Can't unambiguously interpret project/version identifier %r; "
-                "any dashes in the name or version should be escaped using "
-                "underscores. %r" % (fragment,dists)
-            )
-        else:
-            raise DistutilsError(
-                "Can't process plain .py files without an '#egg=name-version'"
-                " suffix to enable automatic setup script generation."
-            )
-
-    dl_blocksize = 8192
-    def _download_to(self, url, filename):
-        self.info("Downloading %s", url)
-        # Download the file
-        fp, tfp, info = None, None, None
-        try:
-            if '#' in url:
-                url, info = url.split('#', 1)
-            fp = self.open_url(url)
-            if isinstance(fp, urllib2.HTTPError):
-                raise DistutilsError(
-                    "Can't download %s: %s %s" % (url, fp.code,fp.msg)
-                )
-            cs = md5()
-            headers = fp.info()
-            blocknum = 0
-            bs = self.dl_blocksize
-            size = -1
-            if "content-length" in headers:
-                # Some servers return multiple Content-Length headers :(
-                content_length = headers.get("Content-Length")
-                size = int(content_length)
-                self.reporthook(url, filename, blocknum, bs, size)
-            tfp = open(filename,'wb')
-            while True:
-                block = fp.read(bs)
-                if block:
-                    cs.update(block)
-                    tfp.write(block)
-                    blocknum += 1
-                    self.reporthook(url, filename, blocknum, bs, size)
-                else:
-                    break
-            if info: self.check_md5(cs, info, filename, tfp)
-            return headers
-        finally:
-            if fp: fp.close()
-            if tfp: tfp.close()
-
-    def reporthook(self, url, filename, blocknum, blksize, size):
-        pass    # no-op
-
-
-    def open_url(self, url, warning=None):
-        if url.startswith('file:'):
-            return local_open(url)
-        try:
-            return open_with_auth(url)
-        except (ValueError, httplib.InvalidURL), v:
-            msg = ' '.join([str(arg) for arg in v.args])
-            if warning:
-                self.warn(warning, msg)
-            else:
-                raise DistutilsError('%s %s' % (url, msg))
-        except urllib2.HTTPError, v:
-            return v
-        except urllib2.URLError, v:
-            if warning:
-                self.warn(warning, v.reason)
-            else:
-                raise DistutilsError("Download error for %s: %s"
-                                     % (url, v.reason))
-        except httplib.BadStatusLine, v:
-            if warning:
-                self.warn(warning, v.line)
-            else:
-                raise DistutilsError('%s returned a bad status line. '
-                                     'The server might be down, %s' % \
-                                             (url, v.line))
-        except httplib.HTTPException, v:
-            if warning:
-                self.warn(warning, v)
-            else:
-                raise DistutilsError("Download error for %s: %s"
-                                     % (url, v))
-
-    def _download_url(self, scheme, url, tmpdir):
-        # Determine download filename
-        #
-        name = filter(None,urlparse.urlparse(url)[2].split('/'))
-        if name:
-            name = name[-1]
-            while '..' in name:
-                name = name.replace('..','.').replace('\\','_')
-        else:
-            name = "__downloaded__"    # default if URL has no path contents
-
-        if name.endswith('.egg.zip'):
-            name = name[:-4]    # strip the extra .zip before download
-
-        filename = os.path.join(tmpdir,name)
-
-        # Download the file
-        #
-        if scheme=='svn' or scheme.startswith('svn+'):
-            return self._download_svn(url, filename)
-        elif scheme=='git' or scheme.startswith('git+'):
-            return self._download_git(url, filename)
-        elif scheme.startswith('hg+'):
-            return self._download_hg(url, filename)
-        elif scheme=='file':
-            return urllib.url2pathname(urlparse.urlparse(url)[2])
-        else:
-            self.url_ok(url, True)   # raises error if not allowed
-            return self._attempt_download(url, filename)
-
-
-
-    def scan_url(self, url):
-        self.process_url(url, True)
-
-
-    def _attempt_download(self, url, filename):
-        headers = self._download_to(url, filename)
-        if 'html' in headers.get('content-type','').lower():
-            return self._download_html(url, headers, filename)
-        else:
-            return filename
-
-    def _download_html(self, url, headers, filename):
-        file = open(filename)
-        for line in file:
-            if line.strip():
-                # Check for a subversion index page
-                if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
-                    # it's a subversion index page:
-                    file.close()
-                    os.unlink(filename)
-                    return self._download_svn(url, filename)
-                break   # not an index page
-        file.close()
-        os.unlink(filename)
-        raise DistutilsError("Unexpected HTML page found at "+url)
-
-    def _download_svn(self, url, filename):
-        url = url.split('#',1)[0]   # remove any fragment for svn's sake
-        self.info("Doing subversion checkout from %s to %s", url, filename)
-        os.system("svn checkout -q %s %s" % (url, filename))
-        return filename
-
-    def _vcs_split_rev_from_url(self, url, pop_prefix=False):
-        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
-
-        scheme = scheme.split('+', 1)[-1]
-
-        # urlsplit may leave the '#fragment' attached to the path for
-        # unrecognized schemes (e.g. 'git+https'), so strip it here.
-        path = path.split('#',1)[0]
-
-        rev = None
-        if '@' in path:
-            path, rev = path.rsplit('@', 1)
-
-        # Also, discard fragment
-        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
-
-        return url, rev
-
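# Illustrative sketch (not from the original module): for a typical VCS link,
# _vcs_split_rev_from_url above yields, e.g.
#   'git+https://example.com/repo.git@v1.2#egg=Example'
#       -> ('https://example.com/repo.git', 'v1.2')
# A standalone equivalent using only the standard library (the repository URL
# is hypothetical):
try:
    from urllib.parse import urlsplit, urlunsplit    # Python 3
except ImportError:
    from urlparse import urlsplit, urlunsplit        # Python 2

def split_rev(url):
    scheme, netloc, path, query, frag = urlsplit(url)
    scheme = scheme.split('+', 1)[-1]      # drop the leading 'git+' / 'hg+'
    path = path.split('#', 1)[0]           # drop any fragment left in the path
    rev = None
    if '@' in path:
        path, rev = path.rsplit('@', 1)    # a trailing '@rev' names the revision
    return urlunsplit((scheme, netloc, path, query, '')), rev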
-    def _download_git(self, url, filename):
-        filename = filename.split('#',1)[0]
-        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
-
-        self.info("Doing git clone from %s to %s", url, filename)
-        os.system("git clone --quiet %s %s" % (url, filename))
-
-        if rev is not None:
-            self.info("Checking out %s", rev)
-            os.system("(cd %s && git checkout --quiet %s)" % (
-                filename,
-                rev,
-            ))
-
-        return filename
-
-    def _download_hg(self, url, filename):
-        filename = filename.split('#',1)[0]
-        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
-
-        self.info("Doing hg clone from %s to %s", url, filename)
-        os.system("hg clone --quiet %s %s" % (url, filename))
-
-        if rev is not None:
-            self.info("Updating to %s", rev)
-            os.system("(cd %s && hg up -C -r %s >&-)" % (
-                filename,
-                rev,
-            ))
-
-        return filename
-
-    def debug(self, msg, *args):
-        log.debug(msg, *args)
-
-    def info(self, msg, *args):
-        log.info(msg, *args)
-
-    def warn(self, msg, *args):
-        log.warn(msg, *args)
-
-# This pattern matches a character entity reference (a decimal numeric
-# reference, a hexadecimal numeric reference, or a named reference).
-entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
-
-def uchr(c):
-    if not isinstance(c, int):
-        return c
-    if c>255: return unichr(c)
-    return chr(c)
-
-def decode_entity(match):
-    what = match.group(1)
-    if what.startswith('#x'):
-        what = int(what[2:], 16)
-    elif what.startswith('#'):
-        what = int(what[1:])
-    else:
-        from htmlentitydefs import name2codepoint
-        what = name2codepoint.get(what, match.group(0))
-    return uchr(what)
-
-def htmldecode(text):
-    """Decode HTML entities in the given text."""
-    return entity_sub(decode_entity, text)
-
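# Illustrative sketch (not from the original module): htmldecode above resolves
# named, decimal and hexadecimal character references.  On Python 3 the
# standard library offers equivalent behaviour; a small self-check of the
# expected result:
import html

def entity_demo():
    raw = 'Example &copy; &#169; &#xA9;'
    decoded = html.unescape(raw)
    assert decoded == 'Example \xa9 \xa9 \xa9'   # three spellings of the copyright sign
    return decoded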
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def socket_timeout(timeout=15):
-    def _socket_timeout(func):
-        def _socket_timeout(*args, **kwargs):
-            old_timeout = socket.getdefaulttimeout()
-            socket.setdefaulttimeout(timeout)
-            try:
-                return func(*args, **kwargs)
-            finally:
-                socket.setdefaulttimeout(old_timeout)
-        return _socket_timeout
-    return _socket_timeout
-
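# Illustrative usage sketch (not from the original module): socket_timeout
# above is a decorator factory -- it swaps in a default socket timeout around
# one call and restores the previous value in the finally block.  The decorated
# function is a hypothetical stand-in for real network work; open_with_auth is
# wrapped the same way further below.
import socket

@socket_timeout(timeout=5)
def timed_lookup():
    return socket.getdefaulttimeout()    # 5 while the wrapper is active

# timed_lookup() -> 5; afterwards the previous default (often None) is restored.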
-def _encode_auth(auth):
-    """
-    A function compatible with Python 2.3-3.3 that will encode
-    auth from a URL suitable for an HTTP header.
-    >>> _encode_auth('username%3Apassword')
-    u'dXNlcm5hbWU6cGFzc3dvcmQ='
-    """
-    auth_s = urllib2.unquote(auth)
-    # convert to bytes
-    auth_bytes = auth_s.encode()
-    # use the legacy interface for Python 2.3 support
-    encoded_bytes = base64.encodestring(auth_bytes)
-    # convert back to a string
-    encoded = encoded_bytes.decode()
-    # strip the trailing carriage return
-    return encoded.rstrip()
-
-def open_with_auth(url):
-    """Open a urllib2 request, handling HTTP authentication"""
-
-    scheme, netloc, path, params, query, frag = urlparse.urlparse(url)
-
-    # Double scheme does not raise on Mac OS X as revealed by a
-    # failing test. We would expect "nonnumeric port". Refs #20.
-    if netloc.endswith(':'):
-        raise httplib.InvalidURL("nonnumeric port: ''")
-
-    if scheme in ('http', 'https'):
-        auth, host = urllib2.splituser(netloc)
-    else:
-        auth = None
-
-    if auth:
-        auth = "Basic " + _encode_auth(auth)
-        new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
-        request = urllib2.Request(new_url)
-        request.add_header("Authorization", auth)
-    else:
-        request = urllib2.Request(url)
-
-    request.add_header('User-Agent', user_agent)
-    fp = urllib2.urlopen(request)
-
-    if auth:
-        # Put authentication info back into request URL if same host,
-        # so that links found on the page will work
-        s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
-        if s2==scheme and h2==host:
-            fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))
-
-    return fp
-
-# adding a timeout to avoid freezing package_index
-open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
-
-
-
-
-
-
-
-
-
-
-
-def fix_sf_url(url):
-    return url      # backward compatibility
-
-def local_open(url):
-    """Read a local path, with special support for directories"""
-    scheme, server, path, param, query, frag = urlparse.urlparse(url)
-    filename = urllib.url2pathname(path)
-    if os.path.isfile(filename):
-        return urllib2.urlopen(url)
-    elif path.endswith('/') and os.path.isdir(filename):
-        files = []
-        for f in os.listdir(filename):
-            if f=='index.html':
-                fp = open(os.path.join(filename,f),'rb')
-                body = fp.read()
-                fp.close()
-                break
-            elif os.path.isdir(os.path.join(filename,f)):
-                f+='/'
-            files.append("<a href=%r>%s</a>" % (f,f))
-        else:
-            body = ("<html><head><title>%s</title>" % url) + \
-                "</head><body>%s</body></html>" % '\n'.join(files)
-        status, message = 200, "OK"
-    else:
-        status, message, body = 404, "Path not found", "Not found"
-
-    return urllib2.HTTPError(url, status, message,
-            {'content-type':'text/html'}, cStringIO.StringIO(body))
-
-
-
-
-
-
-
-
-
-
-
-
-
-# this line is a kludge to keep the trailing blank lines for pje's editor
diff --git a/vendor/distribute-0.6.34/setuptools/sandbox.py b/vendor/distribute-0.6.34/setuptools/sandbox.py
deleted file mode 100644
index 1583b81f268c18f7dc20c86d183d9924f98c5878..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/sandbox.py
+++ /dev/null
@@ -1,293 +0,0 @@
-import os, sys, __builtin__, tempfile, operator, pkg_resources
-if os.name == "java":
-    import org.python.modules.posix.PosixModule as _os
-else:
-    _os = sys.modules[os.name]
-try:
-    _file = file
-except NameError:
-    _file = None
-_open = open
-from distutils.errors import DistutilsError
-__all__ = [
-    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
-]
-def run_setup(setup_script, args):
-    """Run a distutils setup script, sandboxed in its directory"""
-    old_dir = os.getcwd()
-    save_argv = sys.argv[:]
-    save_path = sys.path[:]
-    setup_dir = os.path.abspath(os.path.dirname(setup_script))
-    temp_dir = os.path.join(setup_dir,'temp')
-    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
-    save_tmp = tempfile.tempdir
-    save_modules = sys.modules.copy()
-    pr_state = pkg_resources.__getstate__()
-    try:
-        tempfile.tempdir = temp_dir
-        os.chdir(setup_dir)
-        try:
-            sys.argv[:] = [setup_script]+list(args)
-            sys.path.insert(0, setup_dir)
-            DirectorySandbox(setup_dir).run(
-                lambda: execfile(
-                    "setup.py",
-                    {'__file__':setup_script, '__name__':'__main__'}
-                )
-            )
-        except SystemExit, v:
-            if v.args and v.args[0]:
-                raise
-            # Normal exit, just return
-    finally:
-        pkg_resources.__setstate__(pr_state)
-        sys.modules.update(save_modules)
-        # remove any modules imported within the sandbox
-        del_modules = [
-            mod_name for mod_name in sys.modules
-            if mod_name not in save_modules
-            # exclude any encodings modules. See #285
-            and not mod_name.startswith('encodings.')
-        ]
-        map(sys.modules.__delitem__, del_modules)
-        os.chdir(old_dir)
-        sys.path[:] = save_path
-        sys.argv[:] = save_argv
-        tempfile.tempdir = save_tmp
-
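# Illustrative usage sketch (not from the original module): run_setup above
# executes a setup script with cwd, sys.argv, sys.path, sys.modules and the
# tempfile directory all restored afterwards; writes outside the script's own
# directory raise SandboxViolation.  Assumes a Python 2 environment with this
# package importable; the path and arguments are hypothetical.
from setuptools.sandbox import run_setup

def build_egg(setup_script):
    """Build an egg from `setup_script`, sandboxed in its own directory."""
    run_setup(setup_script, ['--quiet', 'bdist_egg'])

# build_egg('/tmp/build/Example-1.0/setup.py')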
-class AbstractSandbox:
-    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
-
-    _active = False
-
-    def __init__(self):
-        self._attrs = [
-            name for name in dir(_os)
-                if not name.startswith('_') and hasattr(self,name)
-        ]
-
-    def _copy(self, source):
-        for name in self._attrs:
-            setattr(os, name, getattr(source,name))
-
-    def run(self, func):
-        """Run 'func' under os sandboxing"""
-        try:
-            self._copy(self)
-            if _file:
-                __builtin__.file = self._file
-            __builtin__.open = self._open
-            self._active = True
-            return func()
-        finally:
-            self._active = False
-            if _file:
-                __builtin__.file = _file
-            __builtin__.open = _open
-            self._copy(_os)
-
-
-    def _mk_dual_path_wrapper(name):
-        original = getattr(_os,name)
-        def wrap(self,src,dst,*args,**kw):
-            if self._active:
-                src,dst = self._remap_pair(name,src,dst,*args,**kw)
-            return original(src,dst,*args,**kw)
-        return wrap
-
-
-    for name in ["rename", "link", "symlink"]:
-        if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
-
-
-    def _mk_single_path_wrapper(name, original=None):
-        original = original or getattr(_os,name)
-        def wrap(self,path,*args,**kw):
-            if self._active:
-                path = self._remap_input(name,path,*args,**kw)
-            return original(path,*args,**kw)
-        return wrap
-
-    if _file:
-        _file = _mk_single_path_wrapper('file', _file)
-    _open = _mk_single_path_wrapper('open', _open)
-    for name in [
-        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
-        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
-        "startfile", "mkfifo", "mknod", "pathconf", "access"
-    ]:
-        if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
-
-
-    def _mk_single_with_return(name):
-        original = getattr(_os,name)
-        def wrap(self,path,*args,**kw):
-            if self._active:
-                path = self._remap_input(name,path,*args,**kw)
-                return self._remap_output(name, original(path,*args,**kw))
-            return original(path,*args,**kw)
-        return wrap
-
-    for name in ['readlink', 'tempnam']:
-        if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
-
-    def _mk_query(name):
-        original = getattr(_os,name)
-        def wrap(self,*args,**kw):
-            retval = original(*args,**kw)
-            if self._active:
-                return self._remap_output(name, retval)
-            return retval
-        return wrap
-
-    for name in ['getcwd', 'tmpnam']:
-        if hasattr(_os,name): locals()[name] = _mk_query(name)
-
-    def _validate_path(self,path):
-        """Called to remap or validate any path, whether input or output"""
-        return path
-
-    def _remap_input(self,operation,path,*args,**kw):
-        """Called for path inputs"""
-        return self._validate_path(path)
-
-    def _remap_output(self,operation,path):
-        """Called for path outputs"""
-        return self._validate_path(path)
-
-    def _remap_pair(self,operation,src,dst,*args,**kw):
-        """Called for path pairs like rename, link, and symlink operations"""
-        return (
-            self._remap_input(operation+'-from',src,*args,**kw),
-            self._remap_input(operation+'-to',dst,*args,**kw)
-        )
-
-
-if hasattr(os, 'devnull'):
-    _EXCEPTIONS = [os.devnull,]
-else:
-    _EXCEPTIONS = []
-
-try:
-    from win32com.client.gencache import GetGeneratePath
-    _EXCEPTIONS.append(GetGeneratePath())
-    del GetGeneratePath
-except ImportError:
-    # it appears pywin32 is not installed, so no need to exclude.
-    pass
-
-class DirectorySandbox(AbstractSandbox):
-    """Restrict operations to a single subdirectory - pseudo-chroot"""
-
-    write_ops = dict.fromkeys([
-        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
-        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
-    ])
-
-    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
-        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
-        self._prefix = os.path.join(self._sandbox,'')
-        self._exceptions = [os.path.normcase(os.path.realpath(path)) for path in exceptions]
-        AbstractSandbox.__init__(self)
-
-    def _violation(self, operation, *args, **kw):
-        raise SandboxViolation(operation, args, kw)
-
-    if _file:
-        def _file(self, path, mode='r', *args, **kw):
-            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
-                self._violation("file", path, mode, *args, **kw)
-            return _file(path,mode,*args,**kw)
-
-    def _open(self, path, mode='r', *args, **kw):
-        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
-            self._violation("open", path, mode, *args, **kw)
-        return _open(path,mode,*args,**kw)
-
-    def tmpnam(self):
-        self._violation("tmpnam")
-
-    def _ok(self,path):
-        active = self._active
-        try:
-            self._active = False
-            realpath = os.path.normcase(os.path.realpath(path))
-            if (self._exempted(realpath) or realpath == self._sandbox
-                or realpath.startswith(self._prefix)):
-                return True
-        finally:
-            self._active = active
-
-    def _exempted(self, filepath):
-        exception_matches = map(filepath.startswith, self._exceptions)
-        return True in exception_matches
-
-    def _remap_input(self,operation,path,*args,**kw):
-        """Called for path inputs"""
-        if operation in self.write_ops and not self._ok(path):
-            self._violation(operation, os.path.realpath(path), *args, **kw)
-        return path
-
-    def _remap_pair(self,operation,src,dst,*args,**kw):
-        """Called for path pairs like rename, link, and symlink operations"""
-        if not self._ok(src) or not self._ok(dst):
-            self._violation(operation, src, dst, *args, **kw)
-        return (src,dst)
-
-    def open(self, file, flags, mode=0777):
-        """Called for low-level os.open()"""
-        if flags & WRITE_FLAGS and not self._ok(file):
-            self._violation("os.open", file, flags, mode)
-        return _os.open(file,flags,mode)
-
-
-WRITE_FLAGS = reduce(
-    operator.or_,
-    [getattr(_os, a, 0) for a in
-        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
-)
-
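# Illustrative sketch (not from the original module): DirectorySandbox above
# intercepts only *write* operations -- reads may touch any path, while writes
# are confined to the sandbox directory plus the exceptions above (such as
# os.devnull).  The paths used here are hypothetical.
def sandbox_demo(tmpdir):
    sandbox = DirectorySandbox(tmpdir)

    def write_inside():
        open(os.path.join(tmpdir, 'ok.txt'), 'w').close()     # allowed

    def write_outside():
        open('/etc/not-allowed', 'w')                          # blocked

    sandbox.run(write_inside)
    try:
        sandbox.run(write_outside)
    except SandboxViolation:
        pass                                                   # expected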
-
-
-
-class SandboxViolation(DistutilsError):
-    """A setup script attempted to modify the filesystem outside the sandbox"""
-
-    def __str__(self):
-        return """SandboxViolation: %s%r %s
-
-The package setup script has attempted to modify files on your system
-that are not within the EasyInstall build area, and has been aborted.
-
-This package cannot be safely installed by EasyInstall, and may not
-support alternate installation locations even if you run its setup
-script by hand.  Please inform the package's author and the EasyInstall
-maintainers to find out if a fix or workaround is available.""" % self.args
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
diff --git a/vendor/distribute-0.6.34/setuptools/script template (dev).py b/vendor/distribute-0.6.34/setuptools/script template (dev).py
deleted file mode 100644
index 6dd9dd45259880e964e42282697a5c1b2d071fb5..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/script template (dev).py	
+++ /dev/null
@@ -1,6 +0,0 @@
-# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
-__requires__ = """%(spec)r"""
-from pkg_resources import require; require("""%(spec)r""")
-del require
-__file__ = """%(dev_path)r"""
-execfile(__file__)
diff --git a/vendor/distribute-0.6.34/setuptools/script template.py b/vendor/distribute-0.6.34/setuptools/script template.py
deleted file mode 100644
index 8dd5d5100177d17e6bd52e566ddeb5909c778c03..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/script template.py	
+++ /dev/null
@@ -1,4 +0,0 @@
-# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
-__requires__ = """%(spec)r"""
-import pkg_resources
-pkg_resources.run_script("""%(spec)r""", """%(script_name)r""")
diff --git a/vendor/distribute-0.6.34/setuptools/tests/__init__.py b/vendor/distribute-0.6.34/setuptools/tests/__init__.py
deleted file mode 100644
index b6988a08d780806209cc3e54a123614cc87907f7..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/__init__.py
+++ /dev/null
@@ -1,349 +0,0 @@
-"""Tests for the 'setuptools' package"""
-import sys
-import os
-import unittest
-import doctest
-import distutils.core
-import distutils.cmd
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from distutils.errors import DistutilsSetupError
-from distutils.core import Extension
-from distutils.version import LooseVersion
-
-import setuptools.dist
-import setuptools.depends as dep
-from setuptools import Feature
-from setuptools.depends import Require
-
-def additional_tests():
-    import doctest, unittest
-    suite = unittest.TestSuite((
-        doctest.DocFileSuite(
-            os.path.join('tests', 'api_tests.txt'),
-            optionflags=doctest.ELLIPSIS, package='pkg_resources',
-            ),
-        ))
-    if sys.platform == 'win32':
-        suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
-    return suite
-
-def makeSetup(**args):
-    """Return distribution from 'setup(**args)', without executing commands"""
-
-    distutils.core._setup_stop_after = "commandline"
-
-    # Don't let system command line leak into tests!
-    args.setdefault('script_args',['install'])
-
-    try:
-        return setuptools.setup(**args)
-    finally:
-        distutils.core._setup_stop_after = None
-
-
-class DependsTests(unittest.TestCase):
-
-    def testExtractConst(self):
-        if not hasattr(dep, 'extract_constant'):
-            # skip on non-bytecode platforms
-            return
-
-        def f1():
-            global x, y, z
-            x = "test"
-            y = z
-
-        # unrecognized name
-        self.assertEqual(dep.extract_constant(f1.func_code,'q', -1), None)
-
-        # constant assigned
-        self.assertEqual(dep.extract_constant(f1.func_code,'x', -1), "test")
-
-        # expression assigned
-        self.assertEqual(dep.extract_constant(f1.func_code,'y', -1), -1)
-
-        # recognized name, not assigned
-        self.assertEqual(dep.extract_constant(f1.func_code,'z', -1), None)
-
-    def testFindModule(self):
-        self.assertRaises(ImportError, dep.find_module, 'no-such.-thing')
-        self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent')
-        f,p,i = dep.find_module('setuptools.tests')
-        f.close()
-
-    def testModuleExtract(self):
-        if not hasattr(dep, 'get_module_constant'):
-            # skip on non-bytecode platforms
-            return
-
-        from email import __version__
-        self.assertEqual(
-            dep.get_module_constant('email','__version__'), __version__
-        )
-        self.assertEqual(
-            dep.get_module_constant('sys','version'), sys.version
-        )
-        self.assertEqual(
-            dep.get_module_constant('setuptools.tests','__doc__'),__doc__
-        )
-
-    def testRequire(self):
-        if not hasattr(dep, 'extract_constant'):
-            # skip on non-bytecode platforms
-            return
-
-        req = Require('Email','1.0.3','email')
-
-        self.assertEqual(req.name, 'Email')
-        self.assertEqual(req.module, 'email')
-        self.assertEqual(req.requested_version, '1.0.3')
-        self.assertEqual(req.attribute, '__version__')
-        self.assertEqual(req.full_name(), 'Email-1.0.3')
-
-        from email import __version__
-        self.assertEqual(req.get_version(), __version__)
-        self.assertTrue(req.version_ok('1.0.9'))
-        self.assertTrue(not req.version_ok('0.9.1'))
-        self.assertTrue(not req.version_ok('unknown'))
-
-        self.assertTrue(req.is_present())
-        self.assertTrue(req.is_current())
-
-        req = Require('Email 3000','03000','email',format=LooseVersion)
-        self.assertTrue(req.is_present())
-        self.assertTrue(not req.is_current())
-        self.assertTrue(not req.version_ok('unknown'))
-
-        req = Require('Do-what-I-mean','1.0','d-w-i-m')
-        self.assertTrue(not req.is_present())
-        self.assertTrue(not req.is_current())
-
-        req = Require('Tests', None, 'tests', homepage="http://example.com")
-        self.assertEqual(req.format, None)
-        self.assertEqual(req.attribute, None)
-        self.assertEqual(req.requested_version, None)
-        self.assertEqual(req.full_name(), 'Tests')
-        self.assertEqual(req.homepage, 'http://example.com')
-
-        paths = [os.path.dirname(p) for p in __path__]
-        self.assertTrue(req.is_present(paths))
-        self.assertTrue(req.is_current(paths))
-
-
-class DistroTests(unittest.TestCase):
-
-    def setUp(self):
-        self.e1 = Extension('bar.ext',['bar.c'])
-        self.e2 = Extension('c.y', ['y.c'])
-
-        self.dist = makeSetup(
-            packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
-            py_modules=['b.d','x'],
-            ext_modules = (self.e1, self.e2),
-            package_dir = {},
-        )
-
-    def testDistroType(self):
-        self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution))
-
-    def testExcludePackage(self):
-        self.dist.exclude_package('a')
-        self.assertEqual(self.dist.packages, ['b','c'])
-
-        self.dist.exclude_package('b')
-        self.assertEqual(self.dist.packages, ['c'])
-        self.assertEqual(self.dist.py_modules, ['x'])
-        self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
-
-        self.dist.exclude_package('c')
-        self.assertEqual(self.dist.packages, [])
-        self.assertEqual(self.dist.py_modules, ['x'])
-        self.assertEqual(self.dist.ext_modules, [self.e1])
-
-        # test removals from unspecified options
-        makeSetup().exclude_package('x')
-
-    def testIncludeExclude(self):
-        # remove an extension
-        self.dist.exclude(ext_modules=[self.e1])
-        self.assertEqual(self.dist.ext_modules, [self.e2])
-
-        # add it back in
-        self.dist.include(ext_modules=[self.e1])
-        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
-
-        # should not add duplicate
-        self.dist.include(ext_modules=[self.e1])
-        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
-
-    def testExcludePackages(self):
-        self.dist.exclude(packages=['c','b','a'])
-        self.assertEqual(self.dist.packages, [])
-        self.assertEqual(self.dist.py_modules, ['x'])
-        self.assertEqual(self.dist.ext_modules, [self.e1])
-
-    def testEmpty(self):
-        dist = makeSetup()
-        dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
-        dist = makeSetup()
-        dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
-
-    def testContents(self):
-        self.assertTrue(self.dist.has_contents_for('a'))
-        self.dist.exclude_package('a')
-        self.assertTrue(not self.dist.has_contents_for('a'))
-
-        self.assertTrue(self.dist.has_contents_for('b'))
-        self.dist.exclude_package('b')
-        self.assertTrue(not self.dist.has_contents_for('b'))
-
-        self.assertTrue(self.dist.has_contents_for('c'))
-        self.dist.exclude_package('c')
-        self.assertTrue(not self.dist.has_contents_for('c'))
-
-    def testInvalidIncludeExclude(self):
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, nonexistent_option='x'
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, nonexistent_option='x'
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, packages={'x':'y'}
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, packages={'x':'y'}
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, ext_modules={'x':'y'}
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, ext_modules={'x':'y'}
-        )
-
-        self.assertRaises(DistutilsSetupError,
-            self.dist.include, package_dir=['q']
-        )
-        self.assertRaises(DistutilsSetupError,
-            self.dist.exclude, package_dir=['q']
-        )
-
-
-class FeatureTests(unittest.TestCase):
-
-    def setUp(self):
-        self.req = Require('Distutils','1.0.3','distutils')
-        self.dist = makeSetup(
-            features={
-                'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
-                'bar': Feature("bar",  standard=True, packages=['pkg.bar'],
-                               py_modules=['bar_et'], remove=['bar.ext'],
-                       ),
-                'baz': Feature(
-                        "baz", optional=False, packages=['pkg.baz'],
-                        scripts = ['scripts/baz_it'],
-                        libraries=[('libfoo','foo/foofoo.c')]
-                       ),
-                'dwim': Feature("DWIM", available=False, remove='bazish'),
-            },
-            script_args=['--without-bar', 'install'],
-            packages = ['pkg.bar', 'pkg.foo'],
-            py_modules = ['bar_et', 'bazish'],
-            ext_modules = [Extension('bar.ext',['bar.c'])]
-        )
-
-    def testDefaults(self):
-        self.assertTrue(not
-            Feature(
-                "test",standard=True,remove='x',available=False
-            ).include_by_default()
-        )
-        self.assertTrue(
-            Feature("test",standard=True,remove='x').include_by_default()
-        )
-        # Feature must have either kwargs, removes, or require_features
-        self.assertRaises(DistutilsSetupError, Feature, "test")
-
-    def testAvailability(self):
-        self.assertRaises(
-            DistutilsPlatformError,
-            self.dist.features['dwim'].include_in, self.dist
-        )
-
-    def testFeatureOptions(self):
-        dist = self.dist
-        self.assertTrue(
-            ('with-dwim',None,'include DWIM') in dist.feature_options
-        )
-        self.assertTrue(
-            ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
-        )
-        self.assertTrue(
-            ('with-bar',None,'include bar (default)') in dist.feature_options
-        )
-        self.assertTrue(
-            ('without-bar',None,'exclude bar') in dist.feature_options
-        )
-        self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
-        self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
-        self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
-        self.assertTrue(not 'without-baz' in dist.feature_negopt)
-
-    def testUseFeatures(self):
-        dist = self.dist
-        self.assertEqual(dist.with_foo,1)
-        self.assertEqual(dist.with_bar,0)
-        self.assertEqual(dist.with_baz,1)
-        self.assertTrue(not 'bar_et' in dist.py_modules)
-        self.assertTrue(not 'pkg.bar' in dist.packages)
-        self.assertTrue('pkg.baz' in dist.packages)
-        self.assertTrue('scripts/baz_it' in dist.scripts)
-        self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries)
-        self.assertEqual(dist.ext_modules,[])
-        self.assertEqual(dist.require_features, [self.req])
-
-        # If we ask for bar, it should fail because we explicitly disabled
-        # it on the command line
-        self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
-
-    def testFeatureWithInvalidRemove(self):
-        self.assertRaises(
-            SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
-        )
-
-class TestCommandTests(unittest.TestCase):
-
-    def testTestIsCommand(self):
-        test_cmd = makeSetup().get_command_obj('test')
-        self.assertTrue(isinstance(test_cmd, distutils.cmd.Command))
-
-    def testLongOptSuiteWNoDefault(self):
-        ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
-        ts1 = ts1.get_command_obj('test')
-        ts1.ensure_finalized()
-        self.assertEqual(ts1.test_suite, 'foo.tests.suite')
-
-    def testDefaultSuite(self):
-        ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
-        ts2.ensure_finalized()
-        self.assertEqual(ts2.test_suite, 'bar.tests.suite')
-
-    def testDefaultWModuleOnCmdLine(self):
-        ts3 = makeSetup(
-            test_suite='bar.tests',
-            script_args=['test','-m','foo.tests']
-        ).get_command_obj('test')
-        ts3.ensure_finalized()
-        self.assertEqual(ts3.test_module, 'foo.tests')
-        self.assertEqual(ts3.test_suite,  'foo.tests.test_suite')
-
-    def testConflictingOptions(self):
-        ts4 = makeSetup(
-            script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
-        ).get_command_obj('test')
-        self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
-
-    def testNoSuite(self):
-        ts5 = makeSetup().get_command_obj('test')
-        ts5.ensure_finalized()
-        self.assertEqual(ts5.test_suite, None)
diff --git a/vendor/distribute-0.6.34/setuptools/tests/doctest.py b/vendor/distribute-0.6.34/setuptools/tests/doctest.py
deleted file mode 100644
index cc1e06c398b1e861f04bdef5eba9eb6b92b13aa7..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/doctest.py
+++ /dev/null
@@ -1,2683 +0,0 @@
-# Module doctest.
-# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
-# Major enhancements and refactoring by:
-#     Jim Fulton
-#     Edward Loper
-
-# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
-
-try:
-    basestring
-except NameError:
-    basestring = str,unicode
-
-try:
-    enumerate
-except NameError:
-    def enumerate(seq):
-        return zip(range(len(seq)),seq)
-
-r"""Module doctest -- a framework for running examples in docstrings.
-
-In simplest use, end each module M to be tested with:
-
-def _test():
-    import doctest
-    doctest.testmod()
-
-if __name__ == "__main__":
-    _test()
-
-Then running the module as a script will cause the examples in the
-docstrings to get executed and verified:
-
-python M.py
-
-This won't display anything unless an example fails, in which case the
-failing example(s) and the cause(s) of the failure(s) are printed to stdout
-(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
-line of output is "Test failed.".
-
-Run it with the -v switch instead:
-
-python M.py -v
-
-and a detailed report of all examples tried is printed to stdout, along
-with assorted summaries at the end.
-
-You can force verbose mode by passing "verbose=True" to testmod, or prohibit
-it by passing "verbose=False".  In either of those cases, sys.argv is not
-examined by testmod.
-
-There are a variety of other ways to run doctests, including integration
-with the unittest framework, and support for running non-Python text
-files containing doctests.  There are also many ways to override parts
-of doctest's default behaviors.  See the Library Reference Manual for
-details.
-"""
-
-__docformat__ = 'reStructuredText en'
-
-__all__ = [
-    # 0, Option Flags
-    'register_optionflag',
-    'DONT_ACCEPT_TRUE_FOR_1',
-    'DONT_ACCEPT_BLANKLINE',
-    'NORMALIZE_WHITESPACE',
-    'ELLIPSIS',
-    'IGNORE_EXCEPTION_DETAIL',
-    'COMPARISON_FLAGS',
-    'REPORT_UDIFF',
-    'REPORT_CDIFF',
-    'REPORT_NDIFF',
-    'REPORT_ONLY_FIRST_FAILURE',
-    'REPORTING_FLAGS',
-    # 1. Utility Functions
-    'is_private',
-    # 2. Example & DocTest
-    'Example',
-    'DocTest',
-    # 3. Doctest Parser
-    'DocTestParser',
-    # 4. Doctest Finder
-    'DocTestFinder',
-    # 5. Doctest Runner
-    'DocTestRunner',
-    'OutputChecker',
-    'DocTestFailure',
-    'UnexpectedException',
-    'DebugRunner',
-    # 6. Test Functions
-    'testmod',
-    'testfile',
-    'run_docstring_examples',
-    # 7. Tester
-    'Tester',
-    # 8. Unittest Support
-    'DocTestSuite',
-    'DocFileSuite',
-    'set_unittest_reportflags',
-    # 9. Debugging Support
-    'script_from_examples',
-    'testsource',
-    'debug_src',
-    'debug',
-]
-
-import __future__
-
-import sys, traceback, inspect, linecache, os, re, types
-import unittest, difflib, pdb, tempfile
-import warnings
-from StringIO import StringIO
-
-# Don't whine about the deprecated is_private function in this
-# module's tests.
-warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
-                        __name__, 0)
-
-# There are 4 basic classes:
-#  - Example: a <source, want> pair, plus an intra-docstring line number.
-#  - DocTest: a collection of examples, parsed from a docstring, plus
-#    info about where the docstring came from (name, filename, lineno).
-#  - DocTestFinder: extracts DocTests from a given object's docstring and
-#    its contained objects' docstrings.
-#  - DocTestRunner: runs DocTest cases, and accumulates statistics.
-#
-# So the basic picture is:
-#
-#                             list of:
-# +------+                   +---------+                   +-------+
-# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
-# +------+                   +---------+                   +-------+
-#                            | Example |
-#                            |   ...   |
-#                            | Example |
-#                            +---------+
-
-# Option constants.
-
-OPTIONFLAGS_BY_NAME = {}
-def register_optionflag(name):
-    flag = 1 << len(OPTIONFLAGS_BY_NAME)
-    OPTIONFLAGS_BY_NAME[name] = flag
-    return flag
-
-DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
-DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
-NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
-ELLIPSIS = register_optionflag('ELLIPSIS')
-IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
-
-COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
-                    DONT_ACCEPT_BLANKLINE |
-                    NORMALIZE_WHITESPACE |
-                    ELLIPSIS |
-                    IGNORE_EXCEPTION_DETAIL)
-
-REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
-REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
-REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
-REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
-
-REPORTING_FLAGS = (REPORT_UDIFF |
-                   REPORT_CDIFF |
-                   REPORT_NDIFF |
-                   REPORT_ONLY_FIRST_FAILURE)
-
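# Illustrative sketch (not from this bundled doctest copy): every registered
# option flag is a distinct power of two, so flags combine and test with the
# bitwise operators.  MY_OPTION is a hypothetical extra flag.
MY_OPTION = register_optionflag('MY_OPTION')

flags = ELLIPSIS | NORMALIZE_WHITESPACE
assert flags & ELLIPSIS
assert not flags & REPORT_UDIFF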
-# Special string markers for use in `want` strings:
-BLANKLINE_MARKER = '<BLANKLINE>'
-ELLIPSIS_MARKER = '...'
-
-######################################################################
-## Table of Contents
-######################################################################
-#  1. Utility Functions
-#  2. Example & DocTest -- store test cases
-#  3. DocTest Parser -- extracts examples from strings
-#  4. DocTest Finder -- extracts test cases from objects
-#  5. DocTest Runner -- runs test cases
-#  6. Test Functions -- convenient wrappers for testing
-#  7. Tester Class -- for backwards compatibility
-#  8. Unittest Support
-#  9. Debugging Support
-# 10. Example Usage
-
-######################################################################
-## 1. Utility Functions
-######################################################################
-
-def is_private(prefix, base):
-    """prefix, base -> true iff name prefix + "." + base is "private".
-
-    Prefix may be an empty string, and base does not contain a period.
-    Prefix is ignored (although functions you write conforming to this
-    protocol may make use of it).
-    Return true iff base begins with an (at least one) underscore, but
-    does not both begin and end with (at least) two underscores.
-
-    >>> is_private("a.b", "my_func")
-    False
-    >>> is_private("____", "_my_func")
-    True
-    >>> is_private("someclass", "__init__")
-    False
-    >>> is_private("sometypo", "__init_")
-    True
-    >>> is_private("x.y.z", "_")
-    True
-    >>> is_private("_x.y.z", "__")
-    False
-    >>> is_private("", "")  # senseless but consistent
-    False
-    """
-    warnings.warn("is_private is deprecated; it wasn't useful; "
-                  "examine DocTestFinder.find() lists instead",
-                  DeprecationWarning, stacklevel=2)
-    return base[:1] == "_" and not base[:2] == "__" == base[-2:]
-
-def _extract_future_flags(globs):
-    """
-    Return the compiler-flags associated with the future features that
-    have been imported into the given namespace (globs).
-    """
-    flags = 0
-    for fname in __future__.all_feature_names:
-        feature = globs.get(fname, None)
-        if feature is getattr(__future__, fname):
-            flags |= feature.compiler_flag
-    return flags
-
-def _normalize_module(module, depth=2):
-    """
-    Return the module specified by `module`.  In particular:
-      - If `module` is a module, then return module.
-      - If `module` is a string, then import and return the
-        module with that name.
-      - If `module` is None, then return the calling module.
-        The calling module is assumed to be the module of
-        the stack frame at the given depth in the call stack.
-    """
-    if inspect.ismodule(module):
-        return module
-    elif isinstance(module, (str, unicode)):
-        return __import__(module, globals(), locals(), ["*"])
-    elif module is None:
-        return sys.modules[sys._getframe(depth).f_globals['__name__']]
-    else:
-        raise TypeError("Expected a module, string, or None")
-
-def _indent(s, indent=4):
-    """
-    Add the given number of space characters to the beginning every
-    non-blank line in `s`, and return the result.
-    """
-    # This regexp matches the start of non-blank lines:
-    return re.sub('(?m)^(?!$)', indent*' ', s)
-
-def _exception_traceback(exc_info):
-    """
-    Return a string containing a traceback message for the given
-    exc_info tuple (as returned by sys.exc_info()).
-    """
-    # Get a traceback message.
-    excout = StringIO()
-    exc_type, exc_val, exc_tb = exc_info
-    traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
-    return excout.getvalue()
-
-# Override some StringIO methods.
-class _SpoofOut(StringIO):
-    def getvalue(self):
-        result = StringIO.getvalue(self)
-        # If anything at all was written, make sure there's a trailing
-        # newline.  There's no way for the expected output to indicate
-        # that a trailing newline is missing.
-        if result and not result.endswith("\n"):
-            result += "\n"
-        # Prevent softspace from screwing up the next test case, in
-        # case they used print with a trailing comma in an example.
-        if hasattr(self, "softspace"):
-            del self.softspace
-        return result
-
-    def truncate(self,   size=None):
-        StringIO.truncate(self, size)
-        if hasattr(self, "softspace"):
-            del self.softspace
-
-# Worst-case linear-time ellipsis matching.
-def _ellipsis_match(want, got):
-    """
-    Essentially the only subtle case:
-    >>> _ellipsis_match('aa...aa', 'aaa')
-    False
-    """
-    if want.find(ELLIPSIS_MARKER)==-1:
-        return want == got
-
-    # Find "the real" strings.
-    ws = want.split(ELLIPSIS_MARKER)
-    assert len(ws) >= 2
-
-    # Deal with exact matches possibly needed at one or both ends.
-    startpos, endpos = 0, len(got)
-    w = ws[0]
-    if w:   # starts with exact match
-        if got.startswith(w):
-            startpos = len(w)
-            del ws[0]
-        else:
-            return False
-    w = ws[-1]
-    if w:   # ends with exact match
-        if got.endswith(w):
-            endpos -= len(w)
-            del ws[-1]
-        else:
-            return False
-
-    if startpos > endpos:
-        # Exact end matches required more characters than we have, as in
-        # _ellipsis_match('aa...aa', 'aaa')
-        return False
-
-    # For the rest, we only need to find the leftmost non-overlapping
-    # match for each piece.  If there's no overall match that way alone,
-    # there's no overall match period.
-    for w in ws:
-        # w may be '' at times, if there are consecutive ellipses, or
-        # due to an ellipsis at the start or end of `want`.  That's OK.
-        # Search for an empty string succeeds, and doesn't change startpos.
-        startpos = got.find(w, startpos, endpos)
-        if startpos < 0:
-            return False
-        startpos += len(w)
-
-    return True
-
-def _comment_line(line):
-    "Return a commented form of the given line"
-    line = line.rstrip()
-    if line:
-        return '# '+line
-    else:
-        return '#'
-
-class _OutputRedirectingPdb(pdb.Pdb):
-    """
-    A specialized version of the python debugger that redirects stdout
-    to a given stream when interacting with the user.  Stdout is *not*
-    redirected when traced code is executed.
-    """
-    def __init__(self, out):
-        self.__out = out
-        pdb.Pdb.__init__(self)
-
-    def trace_dispatch(self, *args):
-        # Redirect stdout to the given stream.
-        save_stdout = sys.stdout
-        sys.stdout = self.__out
-        # Call Pdb's trace dispatch method.
-        try:
-            return pdb.Pdb.trace_dispatch(self, *args)
-        finally:
-            sys.stdout = save_stdout
-
-# [XX] Normalize with respect to os.path.pardir?
-def _module_relative_path(module, path):
-    if not inspect.ismodule(module):
-        raise TypeError, 'Expected a module: %r' % module
-    if path.startswith('/'):
-        raise ValueError, 'Module-relative files may not have absolute paths'
-
-    # Find the base directory for the path.
-    if hasattr(module, '__file__'):
-        # A normal module/package
-        basedir = os.path.split(module.__file__)[0]
-    elif module.__name__ == '__main__':
-        # An interactive session.
-        if len(sys.argv)>0 and sys.argv[0] != '':
-            basedir = os.path.split(sys.argv[0])[0]
-        else:
-            basedir = os.curdir
-    else:
-        # A module w/o __file__ (this includes builtins)
-        raise ValueError("Can't resolve paths relative to the module " +
-                         module + " (it has no __file__)")
-
-    # Combine the base directory and the path.
-    return os.path.join(basedir, *(path.split('/')))
-
-######################################################################
-## 2. Example & DocTest
-######################################################################
-## - An "example" is a <source, want> pair, where "source" is a
-##   fragment of source code, and "want" is the expected output for
-##   "source."  The Example class also includes information about
-##   where the example was extracted from.
-##
-## - A "doctest" is a collection of examples, typically extracted from
-##   a string (such as an object's docstring).  The DocTest class also
-##   includes information about where the string was extracted from.
-
-class Example:
-    """
-    A single doctest example, consisting of source code and expected
-    output.  `Example` defines the following attributes:
-
-      - source: A single Python statement, always ending with a newline.
-        The constructor adds a newline if needed.
-
-      - want: The expected output from running the source code (either
-        from stdout, or a traceback in case of exception).  `want` ends
-        with a newline unless it's empty, in which case it's an empty
-        string.  The constructor adds a newline if needed.
-
-      - exc_msg: The exception message generated by the example, if
-        the example is expected to generate an exception; or `None` if
-        it is not expected to generate an exception.  This exception
-        message is compared against the return value of
-        `traceback.format_exception_only()`.  `exc_msg` ends with a
-        newline unless it's `None`.  The constructor adds a newline
-        if needed.
-
-      - lineno: The line number within the DocTest string containing
-        this Example where the Example begins.  This line number is
-        zero-based, with respect to the beginning of the DocTest.
-
-      - indent: The example's indentation in the DocTest string.
-        I.e., the number of space characters that precede the
-        example's first prompt.
-
-      - options: A dictionary mapping from option flags to True or
-        False, which is used to override default options for this
-        example.  Any option flags not contained in this dictionary
-        are left at their default value (as specified by the
-        DocTestRunner's optionflags).  By default, no options are set.
-    """
-    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
-                 options=None):
-        # Normalize inputs.
-        if not source.endswith('\n'):
-            source += '\n'
-        if want and not want.endswith('\n'):
-            want += '\n'
-        if exc_msg is not None and not exc_msg.endswith('\n'):
-            exc_msg += '\n'
-        # Store properties.
-        self.source = source
-        self.want = want
-        self.lineno = lineno
-        self.indent = indent
-        if options is None: options = {}
-        self.options = options
-        self.exc_msg = exc_msg
-
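-# For illustration, a hedged usage sketch (the values are made up): the
-# constructor normalizes missing trailing newlines.
-#
-#     >>> ex = Example('print 6*7', '42\n')
-#     >>> ex.source        # a newline was appended to the source
-#     'print 6*7\n'
-#     >>> ex.want
-#     '42\n'
-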
-class DocTest:
-    """
-    A collection of doctest examples that should be run in a single
-    namespace.  Each `DocTest` defines the following attributes:
-
-      - examples: the list of examples.
-
-      - globs: The namespace (aka globals) that the examples should
-        be run in.
-
-      - name: A name identifying the DocTest (typically, the name of
-        the object whose docstring this DocTest was extracted from).
-
-      - filename: The name of the file that this DocTest was extracted
-        from, or `None` if the filename is unknown.
-
-      - lineno: The line number within filename where this DocTest
-        begins, or `None` if the line number is unavailable.  This
-        line number is zero-based, with respect to the beginning of
-        the file.
-
-      - docstring: The string that the examples were extracted from,
-        or `None` if the string is unavailable.
-    """
-    def __init__(self, examples, globs, name, filename, lineno, docstring):
-        """
-        Create a new DocTest containing the given examples.  The
-        DocTest's globals are initialized with a copy of `globs`.
-        """
-        assert not isinstance(examples, basestring), \
-               "DocTest no longer accepts str; use DocTestParser instead"
-        self.examples = examples
-        self.docstring = docstring
-        self.globs = globs.copy()
-        self.name = name
-        self.filename = filename
-        self.lineno = lineno
-
-    def __repr__(self):
-        if len(self.examples) == 0:
-            examples = 'no examples'
-        elif len(self.examples) == 1:
-            examples = '1 example'
-        else:
-            examples = '%d examples' % len(self.examples)
-        return ('<DocTest %s from %s:%s (%s)>' %
-                (self.name, self.filename, self.lineno, examples))
-
-
-    # This lets us sort tests by name:
-    def __cmp__(self, other):
-        if not isinstance(other, DocTest):
-            return -1
-        return cmp((self.name, self.filename, self.lineno, id(self)),
-                   (other.name, other.filename, other.lineno, id(other)))
-
-######################################################################
-## 3. DocTestParser
-######################################################################
-
-class DocTestParser:
-    """
-    A class used to parse strings containing doctest examples.
-    """
-    # This regular expression is used to find doctest examples in a
-    # string.  It defines three groups: `source` is the source code
-    # (including leading indentation and prompts); `indent` is the
-    # indentation of the first (PS1) line of the source code; and
-    # `want` is the expected output (including leading indentation).
-    _EXAMPLE_RE = re.compile(r'''
-        # Source consists of a PS1 line followed by zero or more PS2 lines.
-        (?P<source>
-            (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
-            (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
-        \n?
-        # Want consists of any non-blank lines that do not start with PS1.
-        (?P<want> (?:(?![ ]*$)    # Not a blank line
-                     (?![ ]*>>>)  # Not a line starting with PS1
-                     .*$\n?       # But any other line
-                  )*)
-        ''', re.MULTILINE | re.VERBOSE)
-
-    # A regular expression for handling `want` strings that contain
-    # expected exceptions.  It divides `want` into three pieces:
-    #    - the traceback header line (`hdr`)
-    #    - the traceback stack (`stack`)
-    #    - the exception message (`msg`), as generated by
-    #      traceback.format_exception_only()
-    # `msg` may have multiple lines.  We assume/require that the
-    # exception message is the first non-indented line starting with a word
-    # character following the traceback header line.
-    _EXCEPTION_RE = re.compile(r"""
-        # Grab the traceback header.  Different versions of Python have
-        # said different things on the first traceback line.
-        ^(?P<hdr> Traceback\ \(
-            (?: most\ recent\ call\ last
-            |   innermost\ last
-            ) \) :
-        )
-        \s* $                # toss trailing whitespace on the header.
-        (?P<stack> .*?)      # don't blink: absorb stuff until...
-        ^ (?P<msg> \w+ .*)   #     a line *starts* with alphanum.
-        """, re.VERBOSE | re.MULTILINE | re.DOTALL)
-
-    # A callable returning a true value iff its argument is a blank line
-    # or contains a single comment.
-    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
-
-    def parse(self, string, name='<string>'):
-        """
-        Divide the given string into examples and intervening text,
-        and return them as a list of alternating Examples and strings.
-        Line numbers for the Examples are 0-based.  The optional
-        argument `name` is a name identifying this string, and is only
-        used for error messages.
-        """
-        string = string.expandtabs()
-        # If all lines begin with the same indentation, then strip it.
-        min_indent = self._min_indent(string)
-        if min_indent > 0:
-            string = '\n'.join([l[min_indent:] for l in string.split('\n')])
-
-        output = []
-        charno, lineno = 0, 0
-        # Find all doctest examples in the string:
-        for m in self._EXAMPLE_RE.finditer(string):
-            # Add the pre-example text to `output`.
-            output.append(string[charno:m.start()])
-            # Update lineno (lines before this example)
-            lineno += string.count('\n', charno, m.start())
-            # Extract info from the regexp match.
-            (source, options, want, exc_msg) = \
-                     self._parse_example(m, name, lineno)
-            # Create an Example, and add it to the list.
-            if not self._IS_BLANK_OR_COMMENT(source):
-                output.append( Example(source, want, exc_msg,
-                                    lineno=lineno,
-                                    indent=min_indent+len(m.group('indent')),
-                                    options=options) )
-            # Update lineno (lines inside this example)
-            lineno += string.count('\n', m.start(), m.end())
-            # Update charno.
-            charno = m.end()
-        # Add any remaining post-example text to `output`.
-        output.append(string[charno:])
-        return output
-
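-    # For illustration, a hedged sketch of the return value: the list
-    # alternates plain-text strings with Example instances.
-    #
-    #     >>> chunks = DocTestParser().parse('Text.\n\n>>> 1 + 1\n2\n\nMore.\n')
-    #     >>> [c.__class__.__name__ for c in chunks]
-    #     ['str', 'Example', 'str']
-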
-    def get_doctest(self, string, globs, name, filename, lineno):
-        """
-        Extract all doctest examples from the given string, and
-        collect them into a `DocTest` object.
-
-        `globs`, `name`, `filename`, and `lineno` are attributes for
-        the new `DocTest` object.  See the documentation for `DocTest`
-        for more information.
-        """
-        return DocTest(self.get_examples(string, name), globs,
-                       name, filename, lineno, string)
-
-    def get_examples(self, string, name='<string>'):
-        """
-        Extract all doctest examples from the given string, and return
-        them as a list of `Example` objects.  Line numbers are
-        0-based, because it's most common in doctests that nothing
-        interesting appears on the same line as opening triple-quote,
-        and so the first interesting line is called \"line 1\" then.
-
-        The optional argument `name` is a name identifying this
-        string, and is only used for error messages.
-        """
-        return [x for x in self.parse(string, name)
-                if isinstance(x, Example)]
-
-    def _parse_example(self, m, name, lineno):
-        """
-        Given a regular expression match from `_EXAMPLE_RE` (`m`),
-        return a tuple `(source, options, want, exc_msg)`, where `source`
-        is the matched example's source code (with prompts and indentation
-        stripped); `options` is a dictionary of option-flag overrides;
-        `want` is the example's expected output (with indentation
-        stripped); and `exc_msg` is the expected exception message, or
-        None if no exception is expected.
-
-        `name` is the string's name, and `lineno` is the line number
-        where the example starts; both are used for error messages.
-        """
-        # Get the example's indentation level.
-        indent = len(m.group('indent'))
-
-        # Divide source into lines; check that they're properly
-        # indented; and then strip their indentation & prompts.
-        source_lines = m.group('source').split('\n')
-        self._check_prompt_blank(source_lines, indent, name, lineno)
-        self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
-        source = '\n'.join([sl[indent+4:] for sl in source_lines])
-
-        # Divide want into lines; check that it's properly indented; and
-        # then strip the indentation.  Spaces before the last newline should
-        # be preserved, so plain rstrip() isn't good enough.
-        want = m.group('want')
-        want_lines = want.split('\n')
-        if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
-            del want_lines[-1]  # forget final newline & spaces after it
-        self._check_prefix(want_lines, ' '*indent, name,
-                           lineno + len(source_lines))
-        want = '\n'.join([wl[indent:] for wl in want_lines])
-
-        # If `want` contains a traceback message, then extract it.
-        m = self._EXCEPTION_RE.match(want)
-        if m:
-            exc_msg = m.group('msg')
-        else:
-            exc_msg = None
-
-        # Extract options from the source.
-        options = self._find_options(source, name, lineno)
-
-        return source, options, want, exc_msg
-
-    # This regular expression looks for option directives in the
-    # source code of an example.  Option directives are comments
-    # starting with "doctest:".  Warning: this may give false
-    # positives for string-literals that contain the string
-    # "#doctest:".  Eliminating these false positives would require
-    # actually parsing the string; but we limit them by ignoring any
-    # line containing "#doctest:" that is *followed* by a quote mark.
-    _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
-                                      re.MULTILINE)
-
-    def _find_options(self, source, name, lineno):
-        """
-        Return a dictionary containing option overrides extracted from
-        option directives in the given source string.
-
-        `name` is the string's name, and `lineno` is the line number
-        where the example starts; both are used for error messages.
-        """
-        options = {}
-        # (note: with the current regexp, this will match at most once:)
-        for m in self._OPTION_DIRECTIVE_RE.finditer(source):
-            option_strings = m.group(1).replace(',', ' ').split()
-            for option in option_strings:
-                if (option[0] not in '+-' or
-                    option[1:] not in OPTIONFLAGS_BY_NAME):
-                    raise ValueError('line %r of the doctest for %s '
-                                     'has an invalid option: %r' %
-                                     (lineno+1, name, option))
-                flag = OPTIONFLAGS_BY_NAME[option[1:]]
-                options[flag] = (option[0] == '+')
-        if options and self._IS_BLANK_OR_COMMENT(source):
-            raise ValueError('line %r of the doctest for %s has an option '
-                             'directive on a line with no example: %r' %
-                             (lineno, name, source))
-        return options
-
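-    # For illustration, a hedged example of an option directive in use: a
-    # "# doctest:" comment on the source line toggles flags for that one
-    # example, e.g.
-    #
-    #     >>> print range(20)  # doctest: +ELLIPSIS
-    #     [0, 1, ..., 19]
-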
-    # This regular expression finds the indentation of every non-blank
-    # line in a string.
-    _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
-
-    def _min_indent(self, s):
-        "Return the minimum indentation of any non-blank line in `s`"
-        indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
-        if len(indents) > 0:
-            return min(indents)
-        else:
-            return 0
-
-    def _check_prompt_blank(self, lines, indent, name, lineno):
-        """
-        Given the lines of a source string (including prompts and
-        leading indentation), check to make sure that every prompt is
-        followed by a space character.  If any prompt is not followed by
-        a space character, then raise ValueError.
-        """
-        for i, line in enumerate(lines):
-            if len(line) >= indent+4 and line[indent+3] != ' ':
-                raise ValueError('line %r of the docstring for %s '
-                                 'lacks blank after %s: %r' %
-                                 (lineno+i+1, name,
-                                  line[indent:indent+3], line))
-
-    def _check_prefix(self, lines, prefix, name, lineno):
-        """
-        Check that every line in the given list starts with the given
-        prefix; if any line does not, then raise a ValueError.
-        """
-        for i, line in enumerate(lines):
-            if line and not line.startswith(prefix):
-                raise ValueError('line %r of the docstring for %s has '
-                                 'inconsistent leading whitespace: %r' %
-                                 (lineno+i+1, name, line))
-
-
-######################################################################
-## 4. DocTest Finder
-######################################################################
-
-class DocTestFinder:
-    """
-    A class used to extract the DocTests that are relevant to a given
-    object, from its docstring and the docstrings of its contained
-    objects.  Doctests can currently be extracted from the following
-    object types: modules, functions, classes, methods, staticmethods,
-    classmethods, and properties.
-    """
-
-    def __init__(self, verbose=False, parser=DocTestParser(),
-                 recurse=True, _namefilter=None, exclude_empty=True):
-        """
-        Create a new doctest finder.
-
-        The optional argument `parser` specifies the `DocTestParser`
-        object (or a drop-in replacement) that is used to extract
-        doctests from docstrings.
-
-        If the optional argument `recurse` is false, then `find` will
-        only examine the given object, and not any contained objects.
-
-        If the optional argument `exclude_empty` is false, then `find`
-        will include tests for objects with empty docstrings.
-        """
-        self._parser = parser
-        self._verbose = verbose
-        self._recurse = recurse
-        self._exclude_empty = exclude_empty
-        # _namefilter is undocumented, and exists only for temporary backward-
-        # compatibility support of testmod's deprecated isprivate mess.
-        self._namefilter = _namefilter
-
-    def find(self, obj, name=None, module=None, globs=None,
-             extraglobs=None):
-        """
-        Return a list of the DocTests that are defined by the given
-        object's docstring, or by any of its contained objects'
-        docstrings.
-
-        The optional parameter `module` is the module that contains
-        the given object.  If the module is not specified or is None, then
-        the test finder will attempt to automatically determine the
-        correct module.  The object's module is used:
-
-            - As a default namespace, if `globs` is not specified.
-            - To prevent the DocTestFinder from extracting DocTests
-              from objects that are imported from other modules.
-            - To find the name of the file containing the object.
-            - To help find the line number of the object within its
-              file.
-
-        Contained objects whose module does not match `module` are ignored.
-
-        If `module` is False, no attempt to find the module will be made.
-        This is obscure, of use mostly in tests:  if `module` is False, or
-        is None but cannot be found automatically, then all objects are
-        considered to belong to the (non-existent) module, so all contained
-        objects will (recursively) be searched for doctests.
-
-        The globals for each DocTest are formed by combining `globs`
-        and `extraglobs` (bindings in `extraglobs` override bindings
-        in `globs`).  A new copy of the globals dictionary is created
-        for each DocTest.  If `globs` is not specified, then it
-        defaults to the module's `__dict__`, if specified, or {}
-        otherwise.  If `extraglobs` is not specified, then it defaults
-        to {}.
-
-        """
-        # If name was not specified, then extract it from the object.
-        if name is None:
-            name = getattr(obj, '__name__', None)
-            if name is None:
-                raise ValueError("DocTestFinder.find: name must be given "
-                        "when obj.__name__ doesn't exist: %r" %
-                                 (type(obj),))
-
-        # Find the module that contains the given object (if obj is
-        # a module, then module=obj.).  Note: this may fail, in which
-        # case module will be None.
-        if module is False:
-            module = None
-        elif module is None:
-            module = inspect.getmodule(obj)
-
-        # Read the module's source code.  This is used by
-        # DocTestFinder._find_lineno to find the line number for a
-        # given object's docstring.
-        try:
-            file = inspect.getsourcefile(obj) or inspect.getfile(obj)
-            source_lines = linecache.getlines(file)
-            if not source_lines:
-                source_lines = None
-        except TypeError:
-            source_lines = None
-
-        # Initialize globals, and merge in extraglobs.
-        if globs is None:
-            if module is None:
-                globs = {}
-            else:
-                globs = module.__dict__.copy()
-        else:
-            globs = globs.copy()
-        if extraglobs is not None:
-            globs.update(extraglobs)
-
-        # Recursively explore `obj`, extracting DocTests.
-        tests = []
-        self._find(tests, obj, name, module, source_lines, globs, {})
-        return tests
-
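-    # For illustration, a hedged usage sketch (`square` below is made up):
-    # `find` returns one DocTest per docstring that is examined.
-    #
-    #     >>> def square(x):
-    #     ...     '''
-    #     ...     >>> square(3)
-    #     ...     9
-    #     ...     '''
-    #     ...     return x * x
-    #     >>> tests = DocTestFinder().find(square)
-    #     >>> len(tests), tests[0].name
-    #     (1, 'square')
-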
-    def _filter(self, obj, prefix, base):
-        """
-        Return true if the given object should not be examined.
-        """
-        return (self._namefilter is not None and
-                self._namefilter(prefix, base))
-
-    def _from_module(self, module, object):
-        """
-        Return true if the given object is defined in the given
-        module.
-        """
-        if module is None:
-            return True
-        elif inspect.isfunction(object):
-            return module.__dict__ is object.func_globals
-        elif inspect.isclass(object):
-            return module.__name__ == object.__module__
-        elif inspect.getmodule(object) is not None:
-            return module is inspect.getmodule(object)
-        elif hasattr(object, '__module__'):
-            return module.__name__ == object.__module__
-        elif isinstance(object, property):
-            return True # [XX] no way to be sure.
-        else:
-            raise ValueError("object must be a class or function")
-
-    def _find(self, tests, obj, name, module, source_lines, globs, seen):
-        """
-        Find tests for the given object and any contained objects, and
-        add them to `tests`.
-        """
-        if self._verbose:
-            print 'Finding tests in %s' % name
-
-        # If we've already processed this object, then ignore it.
-        if id(obj) in seen:
-            return
-        seen[id(obj)] = 1
-
-        # Find a test for this object, and add it to the list of tests.
-        test = self._get_test(obj, name, module, globs, source_lines)
-        if test is not None:
-            tests.append(test)
-
-        # Look for tests in a module's contained objects.
-        if inspect.ismodule(obj) and self._recurse:
-            for valname, val in obj.__dict__.items():
-                # Check if this contained object should be ignored.
-                if self._filter(val, name, valname):
-                    continue
-                valname = '%s.%s' % (name, valname)
-                # Recurse to functions & classes.
-                if ((inspect.isfunction(val) or inspect.isclass(val)) and
-                    self._from_module(module, val)):
-                    self._find(tests, val, valname, module, source_lines,
-                               globs, seen)
-
-        # Look for tests in a module's __test__ dictionary.
-        if inspect.ismodule(obj) and self._recurse:
-            for valname, val in getattr(obj, '__test__', {}).items():
-                if not isinstance(valname, basestring):
-                    raise ValueError("DocTestFinder.find: __test__ keys "
-                                     "must be strings: %r" %
-                                     (type(valname),))
-                if not (inspect.isfunction(val) or inspect.isclass(val) or
-                        inspect.ismethod(val) or inspect.ismodule(val) or
-                        isinstance(val, basestring)):
-                    raise ValueError("DocTestFinder.find: __test__ values "
-                                     "must be strings, functions, methods, "
-                                     "classes, or modules: %r" %
-                                     (type(val),))
-                valname = '%s.__test__.%s' % (name, valname)
-                self._find(tests, val, valname, module, source_lines,
-                           globs, seen)
-
-        # Look for tests in a class's contained objects.
-        if inspect.isclass(obj) and self._recurse:
-            for valname, val in obj.__dict__.items():
-                # Check if this contained object should be ignored.
-                if self._filter(val, name, valname):
-                    continue
-                # Special handling for staticmethod/classmethod.
-                if isinstance(val, staticmethod):
-                    val = getattr(obj, valname)
-                if isinstance(val, classmethod):
-                    val = getattr(obj, valname).im_func
-
-                # Recurse to methods, properties, and nested classes.
-                if ((inspect.isfunction(val) or inspect.isclass(val) or
-                      isinstance(val, property)) and
-                      self._from_module(module, val)):
-                    valname = '%s.%s' % (name, valname)
-                    self._find(tests, val, valname, module, source_lines,
-                               globs, seen)
-
-    def _get_test(self, obj, name, module, globs, source_lines):
-        """
-        Return a DocTest for the given object, if it defines a docstring;
-        otherwise, return None.
-        """
-        # Extract the object's docstring.  If it doesn't have one,
-        # then return None (no test for this object).
-        if isinstance(obj, basestring):
-            docstring = obj
-        else:
-            try:
-                if obj.__doc__ is None:
-                    docstring = ''
-                else:
-                    docstring = obj.__doc__
-                    if not isinstance(docstring, basestring):
-                        docstring = str(docstring)
-            except (TypeError, AttributeError):
-                docstring = ''
-
-        # Find the docstring's location in the file.
-        lineno = self._find_lineno(obj, source_lines)
-
-        # Don't bother if the docstring is empty.
-        if self._exclude_empty and not docstring:
-            return None
-
-        # Return a DocTest for this object.
-        if module is None:
-            filename = None
-        else:
-            filename = getattr(module, '__file__', module.__name__)
-            if filename[-4:] in (".pyc", ".pyo"):
-                filename = filename[:-1]
-        return self._parser.get_doctest(docstring, globs, name,
-                                        filename, lineno)
-
-    def _find_lineno(self, obj, source_lines):
-        """
-        Return a line number of the given object's docstring.  Note:
-        this method assumes that the object has a docstring.
-        """
-        lineno = None
-
-        # Find the line number for modules.
-        if inspect.ismodule(obj):
-            lineno = 0
-
-        # Find the line number for classes.
-        # Note: this could be fooled if a class is defined multiple
-        # times in a single file.
-        if inspect.isclass(obj):
-            if source_lines is None:
-                return None
-            pat = re.compile(r'^\s*class\s*%s\b' %
-                             getattr(obj, '__name__', '-'))
-            for i, line in enumerate(source_lines):
-                if pat.match(line):
-                    lineno = i
-                    break
-
-        # Find the line number for functions & methods.
-        if inspect.ismethod(obj): obj = obj.im_func
-        if inspect.isfunction(obj): obj = obj.func_code
-        if inspect.istraceback(obj): obj = obj.tb_frame
-        if inspect.isframe(obj): obj = obj.f_code
-        if inspect.iscode(obj):
-            lineno = getattr(obj, 'co_firstlineno', None)-1
-
-        # Find the line number where the docstring starts.  Assume
-        # that it's the first line that begins with a quote mark.
-        # Note: this could be fooled by a multiline function
-        # signature, where a continuation line begins with a quote
-        # mark.
-        if lineno is not None:
-            if source_lines is None:
-                return lineno+1
-            pat = re.compile('(^|.*:)\s*\w*("|\')')
-            for lineno in range(lineno, len(source_lines)):
-                if pat.match(source_lines[lineno]):
-                    return lineno
-
-        # We couldn't find the line number.
-        return None
-
-######################################################################
-## 5. DocTest Runner
-######################################################################
-
-class DocTestRunner:
-    """
-    A class used to run DocTest test cases, and accumulate statistics.
-    The `run` method is used to process a single DocTest case.  It
-    returns a tuple `(f, t)`, where `t` is the number of test cases
-    tried, and `f` is the number of test cases that failed.
-
-        >>> tests = DocTestFinder().find(_TestClass)
-        >>> runner = DocTestRunner(verbose=False)
-        >>> for test in tests:
-        ...     print runner.run(test)
-        (0, 2)
-        (0, 1)
-        (0, 2)
-        (0, 2)
-
-    The `summarize` method prints a summary of all the test cases that
-    have been run by the runner, and returns an aggregated `(f, t)`
-    tuple:
-
-        >>> runner.summarize(verbose=1)
-        4 items passed all tests:
-           2 tests in _TestClass
-           2 tests in _TestClass.__init__
-           2 tests in _TestClass.get
-           1 tests in _TestClass.square
-        7 tests in 4 items.
-        7 passed and 0 failed.
-        Test passed.
-        (0, 7)
-
-    The aggregated number of tried examples and failed examples is
-    also available via the `tries` and `failures` attributes:
-
-        >>> runner.tries
-        7
-        >>> runner.failures
-        0
-
-    The comparison between expected outputs and actual outputs is done
-    by an `OutputChecker`.  This comparison may be customized with a
-    number of option flags; see the documentation for `testmod` for
-    more information.  If the option flags are insufficient, then the
-    comparison may also be customized by passing a subclass of
-    `OutputChecker` to the constructor.
-
-    The test runner's display output can be controlled in two ways.
-    First, an output function (`out`) can be passed to
-    `DocTestRunner.run`; this function will be called with strings that
-    should be displayed.  It defaults to `sys.stdout.write`.  If
-    capturing the output is not sufficient, then the display output
-    can also be customized by subclassing DocTestRunner, and
-    overriding the methods `report_start`, `report_success`,
-    `report_unexpected_exception`, and `report_failure`.
-    """
-    # This divider string is used to separate failure messages, and to
-    # separate sections of the summary.
-    DIVIDER = "*" * 70
-
-    def __init__(self, checker=None, verbose=None, optionflags=0):
-        """
-        Create a new test runner.
-
-        Optional keyword arg `checker` is the `OutputChecker` that
-        should be used to compare the expected outputs and actual
-        outputs of doctest examples.
-
-        Optional keyword arg 'verbose' prints lots of stuff if true,
-        only failures if false; by default, it's true iff '-v' is in
-        sys.argv.
-
-        Optional argument `optionflags` can be used to control how the
-        test runner compares expected output to actual output, and how
-        it displays failures.  See the documentation for `testmod` for
-        more information.
-        """
-        self._checker = checker or OutputChecker()
-        if verbose is None:
-            verbose = '-v' in sys.argv
-        self._verbose = verbose
-        self.optionflags = optionflags
-        self.original_optionflags = optionflags
-
-        # Keep track of the examples we've run.
-        self.tries = 0
-        self.failures = 0
-        self._name2ft = {}
-
-        # Create a fake output target for capturing doctest output.
-        self._fakeout = _SpoofOut()
-
-    #/////////////////////////////////////////////////////////////////
-    # Reporting methods
-    #/////////////////////////////////////////////////////////////////
-
-    def report_start(self, out, test, example):
-        """
-        Report that the test runner is about to process the given
-        example.  (Only displays a message if verbose=True)
-        """
-        if self._verbose:
-            if example.want:
-                out('Trying:\n' + _indent(example.source) +
-                    'Expecting:\n' + _indent(example.want))
-            else:
-                out('Trying:\n' + _indent(example.source) +
-                    'Expecting nothing\n')
-
-    def report_success(self, out, test, example, got):
-        """
-        Report that the given example ran successfully.  (Only
-        displays a message if verbose=True)
-        """
-        if self._verbose:
-            out("ok\n")
-
-    def report_failure(self, out, test, example, got):
-        """
-        Report that the given example failed.
-        """
-        out(self._failure_header(test, example) +
-            self._checker.output_difference(example, got, self.optionflags))
-
-    def report_unexpected_exception(self, out, test, example, exc_info):
-        """
-        Report that the given example raised an unexpected exception.
-        """
-        out(self._failure_header(test, example) +
-            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
-
-    def _failure_header(self, test, example):
-        out = [self.DIVIDER]
-        if test.filename:
-            if test.lineno is not None and example.lineno is not None:
-                lineno = test.lineno + example.lineno + 1
-            else:
-                lineno = '?'
-            out.append('File "%s", line %s, in %s' %
-                       (test.filename, lineno, test.name))
-        else:
-            out.append('Line %s, in %s' % (example.lineno+1, test.name))
-        out.append('Failed example:')
-        source = example.source
-        out.append(_indent(source))
-        return '\n'.join(out)
-
-    #/////////////////////////////////////////////////////////////////
-    # DocTest Running
-    #/////////////////////////////////////////////////////////////////
-
-    def __run(self, test, compileflags, out):
-        """
-        Run the examples in `test`.  Write the outcome of each example
-        with one of the `DocTestRunner.report_*` methods, using the
-        writer function `out`.  `compileflags` is the set of compiler
-        flags that should be used to execute examples.  Return a tuple
-        `(f, t)`, where `t` is the number of examples tried, and `f`
-        is the number of examples that failed.  The examples are run
-        in the namespace `test.globs`.
-        """
-        # Keep track of the number of failures and tries.
-        failures = tries = 0
-
-        # Save the option flags (since option directives can be used
-        # to modify them).
-        original_optionflags = self.optionflags
-
-        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
-
-        check = self._checker.check_output
-
-        # Process each example.
-        for examplenum, example in enumerate(test.examples):
-
-            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
-            # reporting after the first failure.
-            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
-                     failures > 0)
-
-            # Merge in the example's options.
-            self.optionflags = original_optionflags
-            if example.options:
-                for (optionflag, val) in example.options.items():
-                    if val:
-                        self.optionflags |= optionflag
-                    else:
-                        self.optionflags &= ~optionflag
-
-            # Record that we started this example.
-            tries += 1
-            if not quiet:
-                self.report_start(out, test, example)
-
-            # Use a special filename for compile(), so we can retrieve
-            # the source code during interactive debugging (see
-            # __patched_linecache_getlines).
-            filename = '<doctest %s[%d]>' % (test.name, examplenum)
-
-            # Run the example in the given context (globs), and record
-            # any exception that gets raised.  (But don't intercept
-            # keyboard interrupts.)
-            try:
-                # Don't blink!  This is where the user's code gets run.
-                exec compile(example.source, filename, "single",
-                             compileflags, 1) in test.globs
-                self.debugger.set_continue() # ==== Example Finished ====
-                exception = None
-            except KeyboardInterrupt:
-                raise
-            except:
-                exception = sys.exc_info()
-                self.debugger.set_continue() # ==== Example Finished ====
-
-            got = self._fakeout.getvalue()  # the actual output
-            self._fakeout.truncate(0)
-            outcome = FAILURE   # guilty until proved innocent or insane
-
-            # If the example executed without raising any exceptions,
-            # verify its output.
-            if exception is None:
-                if check(example.want, got, self.optionflags):
-                    outcome = SUCCESS
-
-            # The example raised an exception:  check if it was expected.
-            else:
-                exc_info = sys.exc_info()
-                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
-                if not quiet:
-                    got += _exception_traceback(exc_info)
-
-                # If `example.exc_msg` is None, then we weren't expecting
-                # an exception.
-                if example.exc_msg is None:
-                    outcome = BOOM
-
-                # We expected an exception:  see whether it matches.
-                elif check(example.exc_msg, exc_msg, self.optionflags):
-                    outcome = SUCCESS
-
-                # Another chance if they didn't care about the detail.
-                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
-                    m1 = re.match(r'[^:]*:', example.exc_msg)
-                    m2 = re.match(r'[^:]*:', exc_msg)
-                    if m1 and m2 and check(m1.group(0), m2.group(0),
-                                           self.optionflags):
-                        outcome = SUCCESS
-
-            # Report the outcome.
-            if outcome is SUCCESS:
-                if not quiet:
-                    self.report_success(out, test, example, got)
-            elif outcome is FAILURE:
-                if not quiet:
-                    self.report_failure(out, test, example, got)
-                failures += 1
-            elif outcome is BOOM:
-                if not quiet:
-                    self.report_unexpected_exception(out, test, example,
-                                                     exc_info)
-                failures += 1
-            else:
-                assert False, ("unknown outcome", outcome)
-
-        # Restore the option flags (in case they were modified)
-        self.optionflags = original_optionflags
-
-        # Record and return the number of failures and tries.
-        self.__record_outcome(test, failures, tries)
-        return failures, tries
-
-    def __record_outcome(self, test, f, t):
-        """
-        Record the fact that the given DocTest (`test`) generated `f`
-        failures out of `t` tried examples.
-        """
-        f2, t2 = self._name2ft.get(test.name, (0,0))
-        self._name2ft[test.name] = (f+f2, t+t2)
-        self.failures += f
-        self.tries += t
-
-    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
-                                         r'(?P<name>[\w\.]+)'
-                                         r'\[(?P<examplenum>\d+)\]>$')
-    def __patched_linecache_getlines(self, filename, module_globals=None):
-        m = self.__LINECACHE_FILENAME_RE.match(filename)
-        if m and m.group('name') == self.test.name:
-            example = self.test.examples[int(m.group('examplenum'))]
-            return example.source.splitlines(True)
-        elif self.save_linecache_getlines.func_code.co_argcount>1:
-            return self.save_linecache_getlines(filename, module_globals)
-        else:
-            return self.save_linecache_getlines(filename)
-
-    def run(self, test, compileflags=None, out=None, clear_globs=True):
-        """
-        Run the examples in `test`, and display the results using the
-        writer function `out`.
-
-        The examples are run in the namespace `test.globs`.  If
-        `clear_globs` is true (the default), then this namespace will
-        be cleared after the test runs, to help with garbage
-        collection.  If you would like to examine the namespace after
-        the test completes, then use `clear_globs=False`.
-
-        `compileflags` gives the set of flags that should be used by
-        the Python compiler when running the examples.  If not
-        specified, then it will default to the set of future-import
-        flags that apply to `globs`.
-
-        The output of each example is checked using
-        `DocTestRunner.check_output`, and the results are formatted by
-        the `DocTestRunner.report_*` methods.
-        """
-        self.test = test
-
-        if compileflags is None:
-            compileflags = _extract_future_flags(test.globs)
-
-        save_stdout = sys.stdout
-        if out is None:
-            out = save_stdout.write
-        sys.stdout = self._fakeout
-
-        # Patch pdb.set_trace to restore sys.stdout during interactive
-        # debugging (so it's not still redirected to self._fakeout).
-        # Note that the interactive output will go to *our*
-        # save_stdout, even if that's not the real sys.stdout; this
-        # allows us to write test cases for the set_trace behavior.
-        save_set_trace = pdb.set_trace
-        self.debugger = _OutputRedirectingPdb(save_stdout)
-        self.debugger.reset()
-        pdb.set_trace = self.debugger.set_trace
-
-        # Patch linecache.getlines, so we can see the example's source
-        # when we're inside the debugger.
-        self.save_linecache_getlines = linecache.getlines
-        linecache.getlines = self.__patched_linecache_getlines
-
-        try:
-            return self.__run(test, compileflags, out)
-        finally:
-            sys.stdout = save_stdout
-            pdb.set_trace = save_set_trace
-            linecache.getlines = self.save_linecache_getlines
-            if clear_globs:
-                test.globs.clear()
-
-    #/////////////////////////////////////////////////////////////////
-    # Summarization
-    #/////////////////////////////////////////////////////////////////
-    def summarize(self, verbose=None):
-        """
-        Print a summary of all the test cases that have been run by
-        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
-        the total number of failed examples, and `t` is the total
-        number of tried examples.
-
-        The optional `verbose` argument controls how detailed the
-        summary is.  If the verbosity is not specified, then the
-        DocTestRunner's verbosity is used.
-        """
-        if verbose is None:
-            verbose = self._verbose
-        notests = []
-        passed = []
-        failed = []
-        totalt = totalf = 0
-        for x in self._name2ft.items():
-            name, (f, t) = x
-            assert f <= t
-            totalt += t
-            totalf += f
-            if t == 0:
-                notests.append(name)
-            elif f == 0:
-                passed.append( (name, t) )
-            else:
-                failed.append(x)
-        if verbose:
-            if notests:
-                print len(notests), "items had no tests:"
-                notests.sort()
-                for thing in notests:
-                    print "   ", thing
-            if passed:
-                print len(passed), "items passed all tests:"
-                passed.sort()
-                for thing, count in passed:
-                    print " %3d tests in %s" % (count, thing)
-        if failed:
-            print self.DIVIDER
-            print len(failed), "items had failures:"
-            failed.sort()
-            for thing, (f, t) in failed:
-                print " %3d of %3d in %s" % (f, t, thing)
-        if verbose:
-            print totalt, "tests in", len(self._name2ft), "items."
-            print totalt - totalf, "passed and", totalf, "failed."
-        if totalf:
-            print "***Test Failed***", totalf, "failures."
-        elif verbose:
-            print "Test passed."
-        return totalf, totalt
-
-    #/////////////////////////////////////////////////////////////////
-    # Backward compatibility cruft to maintain doctest.master.
-    #/////////////////////////////////////////////////////////////////
-    def merge(self, other):
-        d = self._name2ft
-        for name, (f, t) in other._name2ft.items():
-            if name in d:
-                print "*** DocTestRunner.merge: '" + name + "' in both" \
-                    " testers; summing outcomes."
-                f2, t2 = d[name]
-                f = f + f2
-                t = t + t2
-            d[name] = f, t
-
-class OutputChecker:
-    """
-    A class used to check whether the actual output from a doctest
-    example matches the expected output.  `OutputChecker` defines two
-    methods: `check_output`, which compares a given pair of outputs,
-    and returns true if they match; and `output_difference`, which
-    returns a string describing the differences between two outputs.
-    """
-    def check_output(self, want, got, optionflags):
-        """
-        Return True iff the actual output from an example (`got`)
-        matches the expected output (`want`).  These strings are
-        always considered to match if they are identical; but
-        depending on what option flags the test runner is using,
-        several non-exact match types are also possible.  See the
-        documentation for `testmod` for more information about
-        option flags.
-        """
-        # Handle the common case first, for efficiency:
-        # if they're string-identical, always return true.
-        if got == want:
-            return True
-
-        # The values True and False replaced 1 and 0 as the return
-        # value for boolean comparisons in Python 2.3.
-        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
-            if (got,want) == ("True\n", "1\n"):
-                return True
-            if (got,want) == ("False\n", "0\n"):
-                return True
-
-        # <BLANKLINE> can be used as a special sequence to signify a
-        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
-        if not (optionflags & DONT_ACCEPT_BLANKLINE):
-            # Replace <BLANKLINE> in want with a blank line.
-            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
-                          '', want)
-            # If a line in got contains only spaces, then remove the
-            # spaces.
-            got = re.sub('(?m)^\s*?$', '', got)
-            if got == want:
-                return True
-
-        # This flag causes doctest to ignore any differences in the
-        # contents of whitespace strings.  Note that this can be used
-        # in conjunction with the ELLIPSIS flag.
-        if optionflags & NORMALIZE_WHITESPACE:
-            got = ' '.join(got.split())
-            want = ' '.join(want.split())
-            if got == want:
-                return True
-
-        # The ELLIPSIS flag says to let the sequence "..." in `want`
-        # match any substring in `got`.
-        if optionflags & ELLIPSIS:
-            if _ellipsis_match(want, got):
-                return True
-
-        # We didn't find any match; return false.
-        return False
-
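-    # For illustration, a hedged sketch of the non-exact match types:
-    # NORMALIZE_WHITESPACE makes runs of whitespace compare equal, and
-    # ELLIPSIS lets "..." in the expected output match any substring.
-    #
-    #     >>> checker = OutputChecker()
-    #     >>> checker.check_output('1  2\n', '1 2\n', NORMALIZE_WHITESPACE)
-    #     True
-    #     >>> checker.check_output('1...4\n', '1 2 3 4\n', ELLIPSIS)
-    #     True
-    #     >>> checker.check_output('1  2\n', '1 2\n', 0)
-    #     False
-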
-    # Should we do a fancy diff?
-    def _do_a_fancy_diff(self, want, got, optionflags):
-        # Not unless they asked for a fancy diff.
-        if not optionflags & (REPORT_UDIFF |
-                              REPORT_CDIFF |
-                              REPORT_NDIFF):
-            return False
-
-        # If expected output uses ellipsis, a meaningful fancy diff is
-        # too hard ... or maybe not.  In two real-life failures Tim saw,
-        # a diff was a major help anyway, so this is commented out.
-        # [todo] _ellipsis_match() knows which pieces do and don't match,
-        # and could be the basis for a kick-ass diff in this case.
-        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
-        ##    return False
-
-        # ndiff does intraline difference marking, so can be useful even
-        # for 1-line differences.
-        if optionflags & REPORT_NDIFF:
-            return True
-
-        # The other diff types need at least a few lines to be helpful.
-        return want.count('\n') > 2 and got.count('\n') > 2
-
-    def output_difference(self, example, got, optionflags):
-        """
-        Return a string describing the differences between the
-        expected output for a given example (`example`) and the actual
-        output (`got`).  `optionflags` is the set of option flags used
-        to compare `want` and `got`.
-        """
-        want = example.want
-        # If <BLANKLINE>s are being used, then replace blank lines
-        # with <BLANKLINE> in the actual output string.
-        if not (optionflags & DONT_ACCEPT_BLANKLINE):
-            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
-
-        # Check if we should use diff.
-        if self._do_a_fancy_diff(want, got, optionflags):
-            # Split want & got into lines.
-            want_lines = want.splitlines(True)  # True == keep line ends
-            got_lines = got.splitlines(True)
-            # Use difflib to find their differences.
-            if optionflags & REPORT_UDIFF:
-                diff = difflib.unified_diff(want_lines, got_lines, n=2)
-                diff = list(diff)[2:] # strip the diff header
-                kind = 'unified diff with -expected +actual'
-            elif optionflags & REPORT_CDIFF:
-                diff = difflib.context_diff(want_lines, got_lines, n=2)
-                diff = list(diff)[2:] # strip the diff header
-                kind = 'context diff with expected followed by actual'
-            elif optionflags & REPORT_NDIFF:
-                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
-                diff = list(engine.compare(want_lines, got_lines))
-                kind = 'ndiff with -expected +actual'
-            else:
-                assert 0, 'Bad diff option'
-            # Remove trailing whitespace on diff output.
-            diff = [line.rstrip() + '\n' for line in diff]
-            return 'Differences (%s):\n' % kind + _indent(''.join(diff))
-
-        # If we're not using diff, then simply list the expected
-        # output followed by the actual output.
-        if want and got:
-            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
-        elif want:
-            return 'Expected:\n%sGot nothing\n' % _indent(want)
-        elif got:
-            return 'Expected nothing\nGot:\n%s' % _indent(got)
-        else:
-            return 'Expected nothing\nGot nothing\n'
-
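-    # For illustration, a hedged sketch of the plain (non-diff) report
-    # format produced when no REPORT_* flag is set:
-    #
-    #     >>> ex = Example('x', '1\n')
-    #     >>> OutputChecker().output_difference(ex, '2\n', 0)
-    #     'Expected:\n    1\nGot:\n    2\n'
-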
-class DocTestFailure(Exception):
-    """A DocTest example has failed in debugging mode.
-
-    The exception instance has variables:
-
-    - test: the DocTest object being run
-
-    - example: the Example object that failed
-
-    - got: the actual output
-    """
-    def __init__(self, test, example, got):
-        self.test = test
-        self.example = example
-        self.got = got
-
-    def __str__(self):
-        return str(self.test)
-
-class UnexpectedException(Exception):
-    """A DocTest example has encountered an unexpected exception
-
-    The exception instance has variables:
-
-    - test: the DocTest object being run
-
-    - example: the Example object that failed
-
-    - exc_info: the exception info
-    """
-    def __init__(self, test, example, exc_info):
-        self.test = test
-        self.example = example
-        self.exc_info = exc_info
-
-    def __str__(self):
-        return str(self.test)
-
-class DebugRunner(DocTestRunner):
-    r"""Run doc tests but raise an exception as soon as there is a failure.
-
-       If an unexpected exception occurs, an UnexpectedException is raised.
-       It contains the test, the example, and the original exception:
-
-         >>> runner = DebugRunner(verbose=False)
-         >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
-         ...                                    {}, 'foo', 'foo.py', 0)
-         >>> try:
-         ...     runner.run(test)
-         ... except UnexpectedException, failure:
-         ...     pass
-
-         >>> failure.test is test
-         True
-
-         >>> failure.example.want
-         '42\n'
-
-         >>> exc_info = failure.exc_info
-         >>> raise exc_info[0], exc_info[1], exc_info[2]
-         Traceback (most recent call last):
-         ...
-         KeyError
-
-       We wrap the original exception to give the calling application
-       access to the test and example information.
-
-       If the output doesn't match, then a DocTestFailure is raised:
-
-         >>> test = DocTestParser().get_doctest('''
-         ...      >>> x = 1
-         ...      >>> x
-         ...      2
-         ...      ''', {}, 'foo', 'foo.py', 0)
-
-         >>> try:
-         ...    runner.run(test)
-         ... except DocTestFailure, failure:
-         ...    pass
-
-       DocTestFailure objects provide access to the test:
-
-         >>> failure.test is test
-         True
-
-       As well as to the example:
-
-         >>> failure.example.want
-         '2\n'
-
-       and the actual output:
-
-         >>> failure.got
-         '1\n'
-
-       If a failure or error occurs, the globals are left intact:
-
-         >>> del test.globs['__builtins__']
-         >>> test.globs
-         {'x': 1}
-
-         >>> test = DocTestParser().get_doctest('''
-         ...      >>> x = 2
-         ...      >>> raise KeyError
-         ...      ''', {}, 'foo', 'foo.py', 0)
-
-         >>> runner.run(test)
-         Traceback (most recent call last):
-         ...
-         UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>
-
-         >>> del test.globs['__builtins__']
-         >>> test.globs
-         {'x': 2}
-
-       But the globals are cleared if there is no error:
-
-         >>> test = DocTestParser().get_doctest('''
-         ...      >>> x = 2
-         ...      ''', {}, 'foo', 'foo.py', 0)
-
-         >>> runner.run(test)
-         (0, 1)
-
-         >>> test.globs
-         {}
-
-       """
-
-    def run(self, test, compileflags=None, out=None, clear_globs=True):
-        r = DocTestRunner.run(self, test, compileflags, out, False)
-        if clear_globs:
-            test.globs.clear()
-        return r
-
-    def report_unexpected_exception(self, out, test, example, exc_info):
-        raise UnexpectedException(test, example, exc_info)
-
-    def report_failure(self, out, test, example, got):
-        raise DocTestFailure(test, example, got)
-
-######################################################################
-## 6. Test Functions
-######################################################################
-# These should be backwards compatible.
-
-# For backward compatibility, a global instance of a DocTestRunner
-# class, updated by testmod.
-master = None
-
-def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
-            report=True, optionflags=0, extraglobs=None,
-            raise_on_error=False, exclude_empty=False):
-    """m=None, name=None, globs=None, verbose=None, isprivate=None,
-       report=True, optionflags=0, extraglobs=None, raise_on_error=False,
-       exclude_empty=False
-
-    Test examples in docstrings in functions and classes reachable
-    from module m (or the current module if m is not supplied), starting
-    with m.__doc__.  Unless isprivate is specified, private names
-    are not skipped.
-
-    Also test examples reachable from dict m.__test__ if it exists and is
-    not None.  m.__test__ maps names to functions, classes and strings;
-    function and class docstrings are tested even if the name is private;
-    strings are tested directly, as if they were docstrings.
-
-    Return (#failures, #tests).
-
-    See doctest.__doc__ for an overview.
-
-    Optional keyword arg "name" gives the name of the module; by default
-    use m.__name__.
-
-    Optional keyword arg "globs" gives a dict to be used as the globals
-    when executing examples; by default, use m.__dict__.  A copy of this
-    dict is actually used for each docstring, so that each docstring's
-    examples start with a clean slate.
-
-    Optional keyword arg "extraglobs" gives a dictionary that should be
-    merged into the globals that are used to execute examples.  By
-    default, no extra globals are used.  This is new in 2.4.
-
-    Optional keyword arg "verbose" prints lots of stuff if true, prints
-    only failures if false; by default, it's true iff "-v" is in sys.argv.
-
-    Optional keyword arg "report" prints a summary at the end when true,
-    else prints nothing at the end.  In verbose mode, the summary is
-    detailed, else very brief (in fact, empty if all tests passed).
-
-    Optional keyword arg "optionflags" or's together module constants,
-    and defaults to 0.  This is new in 2.3.  Possible values (see the
-    docs for details):
-
-        DONT_ACCEPT_TRUE_FOR_1
-        DONT_ACCEPT_BLANKLINE
-        NORMALIZE_WHITESPACE
-        ELLIPSIS
-        IGNORE_EXCEPTION_DETAIL
-        REPORT_UDIFF
-        REPORT_CDIFF
-        REPORT_NDIFF
-        REPORT_ONLY_FIRST_FAILURE
-
-    Optional keyword arg "raise_on_error" raises an exception on the
-    first unexpected exception or failure. This allows failures to be
-    post-mortem debugged.
-
-    Deprecated in Python 2.4:
-    Optional keyword arg "isprivate" specifies a function used to
-    determine whether a name is private.  The default function is to
-    treat all functions as public.  Optionally, "isprivate" can be
-    set to doctest.is_private to skip over functions marked as private
-    using the underscore naming convention; see its docs for details.
-
-    Advanced tomfoolery:  testmod runs methods of a local instance of
-    class doctest.Tester, then merges the results into (or creates)
-    global Tester instance doctest.master.  Methods of doctest.master
-    can be called directly too, if you want to do something unusual.
-    Passing report=0 to testmod is especially useful then, to delay
-    displaying a summary.  Invoke doctest.master.summarize(verbose)
-    when you're done fiddling.
-    """
-    global master
-
-    if isprivate is not None:
-        warnings.warn("the isprivate argument is deprecated; "
-                      "examine DocTestFinder.find() lists instead",
-                      DeprecationWarning)
-
-    # If no module was given, then use __main__.
-    if m is None:
-        # DWA - m will still be None if this wasn't invoked from the command
-        # line, in which case the following TypeError is about as good an error
-        # as we should expect
-        m = sys.modules.get('__main__')
-
-    # Check that we were actually given a module.
-    if not inspect.ismodule(m):
-        raise TypeError("testmod: module required; %r" % (m,))
-
-    # If no name was given, then use the module's name.
-    if name is None:
-        name = m.__name__
-
-    # Find, parse, and run all tests in the given module.
-    finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
-
-    if raise_on_error:
-        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
-    else:
-        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
-    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
-        runner.run(test)
-
-    if report:
-        runner.summarize()
-
-    if master is None:
-        master = runner
-    else:
-        master.merge(runner)
-
-    return runner.failures, runner.tries
-
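-# Editorial usage sketch, not part of the original module: a typical way a
-# caller might drive testmod() with option flags.  The flag combination below
-# is only an illustration.
-def _example_testmod_usage(module=None):
-    """Run the doctests of `module` (default: __main__) and return the
-    (#failures, #tests) pair, allowing ELLIPSIS and whitespace normalization.
-    """
-    flags = ELLIPSIS | NORMALIZE_WHITESPACE
-    return testmod(module, verbose=False, optionflags=flags)
-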
-def testfile(filename, module_relative=True, name=None, package=None,
-             globs=None, verbose=None, report=True, optionflags=0,
-             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
-    """
-    Test examples in the given file.  Return (#failures, #tests).
-
-    Optional keyword arg "module_relative" specifies how filenames
-    should be interpreted:
-
-      - If "module_relative" is True (the default), then "filename"
-         specifies a module-relative path.  By default, this path is
-         relative to the calling module's directory; but if the
-         "package" argument is specified, then it is relative to that
-         package.  To ensure os-independence, "filename" should use
-         "/" characters to separate path segments, and should not
-         be an absolute path (i.e., it may not begin with "/").
-
-      - If "module_relative" is False, then "filename" specifies an
-        os-specific path.  The path may be absolute or relative (to
-        the current working directory).
-
-    Optional keyword arg "name" gives the name of the test; by default
-    use the file's basename.
-
-    Optional keyword argument "package" is a Python package or the
-    name of a Python package whose directory should be used as the
-    base directory for a module relative filename.  If no package is
-    specified, then the calling module's directory is used as the base
-    directory for module relative filenames.  It is an error to
-    specify "package" if "module_relative" is False.
-
-    Optional keyword arg "globs" gives a dict to be used as the globals
-    when executing examples; by default, use {}.  A copy of this dict
-    is actually used for each docstring, so that each docstring's
-    examples start with a clean slate.
-
-    Optional keyword arg "extraglobs" gives a dictionary that should be
-    merged into the globals that are used to execute examples.  By
-    default, no extra globals are used.
-
-    Optional keyword arg "verbose" prints lots of stuff if true, prints
-    only failures if false; by default, it's true iff "-v" is in sys.argv.
-
-    Optional keyword arg "report" prints a summary at the end when true,
-    else prints nothing at the end.  In verbose mode, the summary is
-    detailed, else very brief (in fact, empty if all tests passed).
-
-    Optional keyword arg "optionflags" or's together module constants,
-    and defaults to 0.  Possible values (see the docs for details):
-
-        DONT_ACCEPT_TRUE_FOR_1
-        DONT_ACCEPT_BLANKLINE
-        NORMALIZE_WHITESPACE
-        ELLIPSIS
-        IGNORE_EXCEPTION_DETAIL
-        REPORT_UDIFF
-        REPORT_CDIFF
-        REPORT_NDIFF
-        REPORT_ONLY_FIRST_FAILURE
-
-    Optional keyword arg "raise_on_error" raises an exception on the
-    first unexpected exception or failure. This allows failures to be
-    post-mortem debugged.
-
-    Optional keyword arg "parser" specifies a DocTestParser (or
-    subclass) that should be used to extract tests from the files.
-
-    Advanced tomfoolery:  testmod runs methods of a local instance of
-    class doctest.Tester, then merges the results into (or creates)
-    global Tester instance doctest.master.  Methods of doctest.master
-    can be called directly too, if you want to do something unusual.
-    Passing report=0 to testmod is especially useful then, to delay
-    displaying a summary.  Invoke doctest.master.summarize(verbose)
-    when you're done fiddling.
-    """
-    global master
-
-    if package and not module_relative:
-        raise ValueError("Package may only be specified for module-"
-                         "relative paths.")
-
-    # Relativize the path
-    if module_relative:
-        package = _normalize_module(package)
-        filename = _module_relative_path(package, filename)
-
-    # If no name was given, then use the file's name.
-    if name is None:
-        name = os.path.basename(filename)
-
-    # Assemble the globals.
-    if globs is None:
-        globs = {}
-    else:
-        globs = globs.copy()
-    if extraglobs is not None:
-        globs.update(extraglobs)
-
-    if raise_on_error:
-        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
-    else:
-        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
-    # Read the file, convert it to a test, and run it.
-    f = open(filename)
-    s = f.read()
-    f.close()
-    test = parser.get_doctest(s, globs, name, filename, 0)
-    runner.run(test)
-
-    if report:
-        runner.summarize()
-
-    if master is None:
-        master = runner
-    else:
-        master.merge(runner)
-
-    return runner.failures, runner.tries
-
-def run_docstring_examples(f, globs, verbose=False, name="NoName",
-                           compileflags=None, optionflags=0):
-    """
-    Test examples in the given object's docstring (`f`), using `globs`
-    as globals.  Optional argument `name` is used in failure messages.
-    If the optional argument `verbose` is true, then generate output
-    even if there are no failures.
-
-    `compileflags` gives the set of flags that should be used by the
-    Python compiler when running the examples.  If not specified, then
-    it will default to the set of future-import flags that apply to
-    `globs`.
-
-    Optional keyword arg `optionflags` specifies options for the
-    testing and output.  See the documentation for `testmod` for more
-    information.
-    """
-    # Find, parse, and run all tests in the given module.
-    finder = DocTestFinder(verbose=verbose, recurse=False)
-    runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-    for test in finder.find(f, name, globs=globs):
-        runner.run(test, compileflags=compileflags)
-
-######################################################################
-## 7. Tester
-######################################################################
-# This is provided only for backwards compatibility.  It's not
-# actually used in any way.
-
-class Tester:
-    def __init__(self, mod=None, globs=None, verbose=None,
-                 isprivate=None, optionflags=0):
-
-        warnings.warn("class Tester is deprecated; "
-                      "use class doctest.DocTestRunner instead",
-                      DeprecationWarning, stacklevel=2)
-        if mod is None and globs is None:
-            raise TypeError("Tester.__init__: must specify mod or globs")
-        if mod is not None and not inspect.ismodule(mod):
-            raise TypeError("Tester.__init__: mod must be a module; %r" %
-                            (mod,))
-        if globs is None:
-            globs = mod.__dict__
-        self.globs = globs
-
-        self.verbose = verbose
-        self.isprivate = isprivate
-        self.optionflags = optionflags
-        self.testfinder = DocTestFinder(_namefilter=isprivate)
-        self.testrunner = DocTestRunner(verbose=verbose,
-                                        optionflags=optionflags)
-
-    def runstring(self, s, name):
-        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
-        if self.verbose:
-            print "Running string", name
-        (f,t) = self.testrunner.run(test)
-        if self.verbose:
-            print f, "of", t, "examples failed in string", name
-        return (f,t)
-
-    def rundoc(self, object, name=None, module=None):
-        f = t = 0
-        tests = self.testfinder.find(object, name, module=module,
-                                     globs=self.globs)
-        for test in tests:
-            (f2, t2) = self.testrunner.run(test)
-            (f,t) = (f+f2, t+t2)
-        return (f,t)
-
-    def rundict(self, d, name, module=None):
-        import types
-        m = types.ModuleType(name)
-        m.__dict__.update(d)
-        if module is None:
-            module = False
-        return self.rundoc(m, name, module)
-
-    def run__test__(self, d, name):
-        import types
-        m = types.ModuleType(name)
-        m.__test__ = d
-        return self.rundoc(m, name)
-
-    def summarize(self, verbose=None):
-        return self.testrunner.summarize(verbose)
-
-    def merge(self, other):
-        self.testrunner.merge(other.testrunner)
-
-######################################################################
-## 8. Unittest Support
-######################################################################
-
-_unittest_reportflags = 0
-
-def set_unittest_reportflags(flags):
-    """Sets the unittest option flags.
-
-    The old flag is returned so that a runner could restore the old
-    value if it wished to:
-
-      >>> old = _unittest_reportflags
-      >>> set_unittest_reportflags(REPORT_NDIFF |
-      ...                          REPORT_ONLY_FIRST_FAILURE) == old
-      True
-
-      >>> import doctest
-      >>> doctest._unittest_reportflags == (REPORT_NDIFF |
-      ...                                   REPORT_ONLY_FIRST_FAILURE)
-      True
-
-    Only reporting flags can be set:
-
-      >>> set_unittest_reportflags(ELLIPSIS)
-      Traceback (most recent call last):
-      ...
-      ValueError: ('Only reporting flags allowed', 8)
-
-      >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
-      ...                                   REPORT_ONLY_FIRST_FAILURE)
-      True
-    """
-    global _unittest_reportflags
-
-    if (flags & REPORTING_FLAGS) != flags:
-        raise ValueError("Only reporting flags allowed", flags)
-    old = _unittest_reportflags
-    _unittest_reportflags = flags
-    return old
-
-
-class DocTestCase(unittest.TestCase):
-
-    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
-                 checker=None):
-
-        unittest.TestCase.__init__(self)
-        self._dt_optionflags = optionflags
-        self._dt_checker = checker
-        self._dt_test = test
-        self._dt_setUp = setUp
-        self._dt_tearDown = tearDown
-
-    def setUp(self):
-        test = self._dt_test
-
-        if self._dt_setUp is not None:
-            self._dt_setUp(test)
-
-    def tearDown(self):
-        test = self._dt_test
-
-        if self._dt_tearDown is not None:
-            self._dt_tearDown(test)
-
-        test.globs.clear()
-
-    def runTest(self):
-        test = self._dt_test
-        old = sys.stdout
-        new = StringIO()
-        optionflags = self._dt_optionflags
-
-        if not (optionflags & REPORTING_FLAGS):
-            # The option flags don't include any reporting flags,
-            # so add the default reporting flags
-            optionflags |= _unittest_reportflags
-
-        runner = DocTestRunner(optionflags=optionflags,
-                               checker=self._dt_checker, verbose=False)
-
-        try:
-            runner.DIVIDER = "-"*70
-            failures, tries = runner.run(
-                test, out=new.write, clear_globs=False)
-        finally:
-            sys.stdout = old
-
-        if failures:
-            raise self.failureException(self.format_failure(new.getvalue()))
-
-    def format_failure(self, err):
-        test = self._dt_test
-        if test.lineno is None:
-            lineno = 'unknown line number'
-        else:
-            lineno = '%s' % test.lineno
-        lname = '.'.join(test.name.split('.')[-1:])
-        return ('Failed doctest test for %s\n'
-                '  File "%s", line %s, in %s\n\n%s'
-                % (test.name, test.filename, lineno, lname, err)
-                )
-
-    def debug(self):
-        r"""Run the test case without results and without catching exceptions
-
-           The unit test framework includes a debug method on test cases
-           and test suites to support post-mortem debugging.  The test code
-           is run in such a way that errors are not caught.  This way a
-           caller can catch the errors and initiate post-mortem debugging.
-
-           The DocTestCase provides a debug method that raises
-           UnexpectedException errors if there is an unexpected
-           exception:
-
-             >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
-             ...                {}, 'foo', 'foo.py', 0)
-             >>> case = DocTestCase(test)
-             >>> try:
-             ...     case.debug()
-             ... except UnexpectedException, failure:
-             ...     pass
-
-           The UnexpectedException contains the test, the example, and
-           the original exception:
-
-             >>> failure.test is test
-             True
-
-             >>> failure.example.want
-             '42\n'
-
-             >>> exc_info = failure.exc_info
-             >>> raise exc_info[0], exc_info[1], exc_info[2]
-             Traceback (most recent call last):
-             ...
-             KeyError
-
-           If the output doesn't match, then a DocTestFailure is raised:
-
-             >>> test = DocTestParser().get_doctest('''
-             ...      >>> x = 1
-             ...      >>> x
-             ...      2
-             ...      ''', {}, 'foo', 'foo.py', 0)
-             >>> case = DocTestCase(test)
-
-             >>> try:
-             ...    case.debug()
-             ... except DocTestFailure, failure:
-             ...    pass
-
-           DocTestFailure objects provide access to the test:
-
-             >>> failure.test is test
-             True
-
-           As well as to the example:
-
-             >>> failure.example.want
-             '2\n'
-
-           and the actual output:
-
-             >>> failure.got
-             '1\n'
-
-           """
-
-        self.setUp()
-        runner = DebugRunner(optionflags=self._dt_optionflags,
-                             checker=self._dt_checker, verbose=False)
-        runner.run(self._dt_test)
-        self.tearDown()
-
-    def id(self):
-        return self._dt_test.name
-
-    def __repr__(self):
-        name = self._dt_test.name.split('.')
-        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
-
-    __str__ = __repr__
-
-    def shortDescription(self):
-        return "Doctest: " + self._dt_test.name
-
-def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
-                 **options):
-    """
-    Convert doctest tests for a module to a unittest test suite.
-
-    This converts each documentation string in a module that
-    contains doctest tests to a unittest test case.  If any of the
-    tests in a doc string fail, then the test case fails.  An exception
-    is raised showing the name of the file containing the test and a
-    (sometimes approximate) line number.
-
-    The `module` argument provides the module to be tested.  The argument
-    can be either a module or a module name.
-
-    If no argument is given, the calling module is used.
-
-    A number of options may be provided as keyword arguments:
-
-    setUp
-      A set-up function.  This is called before running the
-      tests in each file. The setUp function will be passed a DocTest
-      object.  The setUp function can access the test globals as the
-      globs attribute of the test passed.
-
-    tearDown
-      A tear-down function.  This is called after running the
-      tests in each file.  The tearDown function will be passed a DocTest
-      object.  The tearDown function can access the test globals as the
-      globs attribute of the test passed.
-
-    globs
-      A dictionary containing initial global variables for the tests.
-
-    optionflags
-       A set of doctest option flags expressed as an integer.
-    """
-
-    if test_finder is None:
-        test_finder = DocTestFinder()
-
-    module = _normalize_module(module)
-    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
-    if globs is None:
-        globs = module.__dict__
-    if not tests:
-        # Why do we want to do this? Because it reveals a bug that might
-        # otherwise be hidden.
-        raise ValueError(module, "has no tests")
-
-    tests.sort()
-    suite = unittest.TestSuite()
-    for test in tests:
-        if len(test.examples) == 0:
-            continue
-        if not test.filename:
-            filename = module.__file__
-            if filename[-4:] in (".pyc", ".pyo"):
-                filename = filename[:-1]
-            test.filename = filename
-        suite.addTest(DocTestCase(test, **options))
-
-    return suite
-
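-# Editorial usage sketch, not part of the original module: wiring a module's
-# doctests into a unittest run via DocTestSuite().  The module name
-# 'mypackage.mymodule' is hypothetical.
-def _example_doctest_suite():
-    """Build a unittest TestSuite from the doctests of a named module."""
-    return DocTestSuite('mypackage.mymodule', optionflags=ELLIPSIS)
-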
-class DocFileCase(DocTestCase):
-
-    def id(self):
-        return '_'.join(self._dt_test.name.split('.'))
-
-    def __repr__(self):
-        return self._dt_test.filename
-    __str__ = __repr__
-
-    def format_failure(self, err):
-        return ('Failed doctest test for %s\n  File "%s", line 0\n\n%s'
-                % (self._dt_test.name, self._dt_test.filename, err)
-                )
-
-def DocFileTest(path, module_relative=True, package=None,
-                globs=None, parser=DocTestParser(), **options):
-    if globs is None:
-        globs = {}
-
-    if package and not module_relative:
-        raise ValueError("Package may only be specified for module-"
-                         "relative paths.")
-
-    # Relativize the path.
-    if module_relative:
-        package = _normalize_module(package)
-        path = _module_relative_path(package, path)
-
-    # Find the file and read it.
-    name = os.path.basename(path)
-    f = open(path)
-    doc = f.read()
-    f.close()
-
-    # Convert it to a test, and wrap it in a DocFileCase.
-    test = parser.get_doctest(doc, globs, name, path, 0)
-    return DocFileCase(test, **options)
-
-def DocFileSuite(*paths, **kw):
-    """A unittest suite for one or more doctest files.
-
-    The path to each doctest file is given as a string; the
-    interpretation of that string depends on the keyword argument
-    "module_relative".
-
-    A number of options may be provided as keyword arguments:
-
-    module_relative
-      If "module_relative" is True, then the given file paths are
-      interpreted as os-independent module-relative paths.  By
-      default, these paths are relative to the calling module's
-      directory; but if the "package" argument is specified, then
-      they are relative to that package.  To ensure os-independence,
-      "filename" should use "/" characters to separate path
-      segments, and may not be an absolute path (i.e., it may not
-      begin with "/").
-
-      If "module_relative" is False, then the given file paths are
-      interpreted as os-specific paths.  These paths may be absolute
-      or relative (to the current working directory).
-
-    package
-      A Python package or the name of a Python package whose directory
-      should be used as the base directory for module relative paths.
-      If "package" is not specified, then the calling module's
-      directory is used as the base directory for module relative
-      filenames.  It is an error to specify "package" if
-      "module_relative" is False.
-
-    setUp
-      A set-up function.  This is called before running the
-      tests in each file. The setUp function will be passed a DocTest
-      object.  The setUp function can access the test globals as the
-      globs attribute of the test passed.
-
-    tearDown
-      A tear-down function.  This is called after running the
-      tests in each file.  The tearDown function will be passed a DocTest
-      object.  The tearDown function can access the test globals as the
-      globs attribute of the test passed.
-
-    globs
-      A dictionary containing initial global variables for the tests.
-
-    optionflags
-      A set of doctest option flags expressed as an integer.
-
-    parser
-      A DocTestParser (or subclass) that should be used to extract
-      tests from the files.
-    """
-    suite = unittest.TestSuite()
-
-    # We do this here so that _normalize_module is called at the right
-    # level.  If it were called in DocFileTest, then this function
-    # would be the caller and we might guess the package incorrectly.
-    if kw.get('module_relative', True):
-        kw['package'] = _normalize_module(kw.get('package'))
-
-    for path in paths:
-        suite.addTest(DocFileTest(path, **kw))
-
-    return suite
-
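-# Editorial usage sketch, not part of the original module: collecting text
-# files of doctests into one unittest suite.  The file names are hypothetical
-# module-relative paths.
-def _example_docfile_suite():
-    """Build a unittest TestSuite from two doctest text files."""
-    return DocFileSuite('README.txt', 'docs/usage.txt',
-                        optionflags=REPORT_ONLY_FIRST_FAILURE)
-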
-######################################################################
-## 9. Debugging Support
-######################################################################
-
-def script_from_examples(s):
-    r"""Extract script from text with examples.
-
-       Converts text with examples to a Python script.  Example input is
-       converted to regular code.  Example output and all other words
-       are converted to comments:
-
-       >>> text = '''
-       ...       Here are examples of simple math.
-       ...
-       ...           Python has super accurate integer addition
-       ...
-       ...           >>> 2 + 2
-       ...           5
-       ...
-       ...           And very friendly error messages:
-       ...
-       ...           >>> 1/0
-       ...           To Infinity
-       ...           And
-       ...           Beyond
-       ...
-       ...           You can use logic if you want:
-       ...
-       ...           >>> if 0:
-       ...           ...    blah
-       ...           ...    blah
-       ...           ...
-       ...
-       ...           Ho hum
-       ...           '''
-
-       >>> print script_from_examples(text)
-       # Here are examples of simple math.
-       #
-       #     Python has super accurate integer addition
-       #
-       2 + 2
-       # Expected:
-       ## 5
-       #
-       #     And very friendly error messages:
-       #
-       1/0
-       # Expected:
-       ## To Infinity
-       ## And
-       ## Beyond
-       #
-       #     You can use logic if you want:
-       #
-       if 0:
-          blah
-          blah
-       #
-       #     Ho hum
-       """
-    output = []
-    for piece in DocTestParser().parse(s):
-        if isinstance(piece, Example):
-            # Add the example's source code (strip trailing NL)
-            output.append(piece.source[:-1])
-            # Add the expected output:
-            want = piece.want
-            if want:
-                output.append('# Expected:')
-                output += ['## '+l for l in want.split('\n')[:-1]]
-        else:
-            # Add non-example text.
-            output += [_comment_line(l)
-                       for l in piece.split('\n')[:-1]]
-
-    # Trim junk on both ends.
-    while output and output[-1] == '#':
-        output.pop()
-    while output and output[0] == '#':
-        output.pop(0)
-    # Combine the output, and return it.
-    return '\n'.join(output)
-
-def testsource(module, name):
-    """Extract the test sources from a doctest docstring as a script.
-
-    Provide the module (or dotted name of the module) containing the
-    test to be debugged and the name (within the module) of the object
-    with the doc string with tests to be debugged.
-    """
-    module = _normalize_module(module)
-    tests = DocTestFinder().find(module)
-    test = [t for t in tests if t.name == name]
-    if not test:
-        raise ValueError(name, "not found in tests")
-    test = test[0]
-    testsrc = script_from_examples(test.docstring)
-    return testsrc
-
-def debug_src(src, pm=False, globs=None):
-    """Debug a single doctest docstring, in argument `src`'"""
-    testsrc = script_from_examples(src)
-    debug_script(testsrc, pm, globs)
-
-def debug_script(src, pm=False, globs=None):
-    "Debug a test script.  `src` is the script, as a string."
-    import pdb
-
-    # Note that tempfile.NamedTemporaryFile() cannot be used.  As the
-    # docs say, a file so created cannot be opened by name a second time
-    # on modern Windows boxes, and execfile() needs to open it.
-    srcfilename = tempfile.mktemp(".py", "doctestdebug")
-    f = open(srcfilename, 'w')
-    f.write(src)
-    f.close()
-
-    try:
-        if globs:
-            globs = globs.copy()
-        else:
-            globs = {}
-
-        if pm:
-            try:
-                execfile(srcfilename, globs, globs)
-            except:
-                print sys.exc_info()[1]
-                pdb.post_mortem(sys.exc_info()[2])
-        else:
-            # Note that %r is vital here.  '%s' instead can, e.g., cause
-            # backslashes to get treated as metacharacters on Windows.
-            pdb.run("execfile(%r)" % srcfilename, globs, globs)
-
-    finally:
-        os.remove(srcfilename)
-
-def debug(module, name, pm=False):
-    """Debug a single doctest docstring.
-
-    Provide the module (or dotted name of the module) containing the
-    test to be debugged and the name (within the module) of the object
-    with the docstring with tests to be debugged.
-    """
-    module = _normalize_module(module)
-    testsrc = testsource(module, name)
-    debug_script(testsrc, pm, module.__dict__)
-
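-# Editorial usage sketch, not part of the original module: dropping into pdb
-# at the first failing example of an object's docstring.  'mymodule' and
-# 'mymodule.MyClass.method' are hypothetical names.
-def _example_debug_usage():
-    """Post-mortem debug the doctests attached to a single object."""
-    debug('mymodule', 'mymodule.MyClass.method', pm=True)
-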
-######################################################################
-## 10. Example Usage
-######################################################################
-class _TestClass:
-    """
-    A pointless class, for sanity-checking of docstring testing.
-
-    Methods:
-        square()
-        get()
-
-    >>> _TestClass(13).get() + _TestClass(-12).get()
-    1
-    >>> hex(_TestClass(13).square().get())
-    '0xa9'
-    """
-
-    def __init__(self, val):
-        """val -> _TestClass object with associated value val.
-
-        >>> t = _TestClass(123)
-        >>> print t.get()
-        123
-        """
-
-        self.val = val
-
-    def square(self):
-        """square() -> square TestClass's associated value
-
-        >>> _TestClass(13).square().get()
-        169
-        """
-
-        self.val = self.val ** 2
-        return self
-
-    def get(self):
-        """get() -> return TestClass's associated value.
-
-        >>> x = _TestClass(-42)
-        >>> print x.get()
-        -42
-        """
-
-        return self.val
-
-__test__ = {"_TestClass": _TestClass,
-            "string": r"""
-                      Example of a string object, searched as-is.
-                      >>> x = 1; y = 2
-                      >>> x + y, x * y
-                      (3, 2)
-                      """,
-
-            "bool-int equivalence": r"""
-                                    In 2.2, boolean expressions displayed
-                                    0 or 1.  By default, we still accept
-                                    them.  This can be disabled by passing
-                                    DONT_ACCEPT_TRUE_FOR_1 to the new
-                                    optionflags argument.
-                                    >>> 4 == 4
-                                    1
-                                    >>> 4 == 4
-                                    True
-                                    >>> 4 > 4
-                                    0
-                                    >>> 4 > 4
-                                    False
-                                    """,
-
-            "blank lines": r"""
-                Blank lines can be marked with <BLANKLINE>:
-                    >>> print 'foo\n\nbar\n'
-                    foo
-                    <BLANKLINE>
-                    bar
-                    <BLANKLINE>
-            """,
-
-            "ellipsis": r"""
-                If the ellipsis flag is used, then '...' can be used to
-                elide substrings in the desired output:
-                    >>> print range(1000) #doctest: +ELLIPSIS
-                    [0, 1, 2, ..., 999]
-            """,
-
-            "whitespace normalization": r"""
-                If the whitespace normalization flag is used, then
-                differences in whitespace are ignored.
-                    >>> print range(30) #doctest: +NORMALIZE_WHITESPACE
-                    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
-                     15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
-                     27, 28, 29]
-            """,
-           }
-
-def _test():
-    r = unittest.TextTestRunner()
-    r.run(DocTestSuite())
-
-if __name__ == "__main__":
-    _test()
-
diff --git a/vendor/distribute-0.6.34/setuptools/tests/indexes/test_links_priority/external.html b/vendor/distribute-0.6.34/setuptools/tests/indexes/test_links_priority/external.html
deleted file mode 100644
index 92e4702f634dfb37a404bec3103b76f6afcaa917..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/indexes/test_links_priority/external.html
+++ /dev/null
@@ -1,3 +0,0 @@
-<html><body>
-<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
-</body></html>
diff --git a/vendor/distribute-0.6.34/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/vendor/distribute-0.6.34/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
deleted file mode 100644
index fefb028bd3ee7d45a414d6e96a7b2a21ffd7eda7..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
+++ /dev/null
@@ -1,4 +0,0 @@
-<html><body>
-<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
-<a href="../../external.html" rel="homepage">external homepage</a><br/>
-</body></html>
diff --git a/vendor/distribute-0.6.34/setuptools/tests/py26compat.py b/vendor/distribute-0.6.34/setuptools/tests/py26compat.py
deleted file mode 100644
index d4fb891af676421a8eb4be1227c8b3f3d044c01f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/py26compat.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import unittest
-
-try:
-	# provide skipIf for Python 2.4-2.6
-	skipIf = unittest.skipIf
-except AttributeError:
-	def skipIf(condition, reason):
-		def skipper(func):
-			def skip(*args, **kwargs):
-				return
-			if condition:
-				return skip
-			return func
-		return skipper
diff --git a/vendor/distribute-0.6.34/setuptools/tests/server.py b/vendor/distribute-0.6.34/setuptools/tests/server.py
deleted file mode 100644
index b2ab7acc7cc195633522f545ad0b5437c8162654..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/server.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""Basic http server for tests to simulate PyPI or custom indexes
-"""
-import urllib2
-import sys
-import time
-import threading
-import BaseHTTPServer
-from BaseHTTPServer import HTTPServer
-from SimpleHTTPServer import SimpleHTTPRequestHandler
-
-class IndexServer(HTTPServer):
-    """Basic single-threaded http server simulating a package index
-
-    You can use this server in unittest like this::
-        s = IndexServer()
-        s.start()
-        index_url = s.base_url() + 'mytestindex'
-        # do some test requests to the index
-        # The index files should be located in setuptools/tests/indexes
-        s.stop()
-    """
-    def __init__(self, server_address=('', 0),
-            RequestHandlerClass=SimpleHTTPRequestHandler):
-        HTTPServer.__init__(self, server_address, RequestHandlerClass)
-        self._run = True
-
-    def serve(self):
-        while self._run:
-            self.handle_request()
-
-    def start(self):
-        self.thread = threading.Thread(target=self.serve)
-        self.thread.start()
-
-    def stop(self):
-        "Stop the server"
-
-        # Let the server finish the last request and wait for a new one.
-        time.sleep(0.1)
-
-        # self.shutdown is not supported on python < 2.6, so just
-        #  set _run to false, and make a request, causing it to
-        #  terminate.
-        self._run = False
-        url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
-        try:
-            if sys.version_info >= (2, 6):
-                urllib2.urlopen(url, timeout=5)
-            else:
-                urllib2.urlopen(url)
-        except urllib2.URLError:
-            # ignore any errors; all that's important is the request
-            pass
-        self.thread.join()
-
-    def base_url(self):
-        port = self.server_port
-        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
-
-class RequestRecorder(BaseHTTPServer.BaseHTTPRequestHandler):
-    def do_GET(self):
-        requests = vars(self.server).setdefault('requests', [])
-        requests.append(self)
-        self.send_response(200, 'OK')
-
-class MockServer(HTTPServer, threading.Thread):
-    """
-    A simple HTTP Server that records the requests made to it.
-    """
-    def __init__(self, server_address=('', 0),
-            RequestHandlerClass=RequestRecorder):
-        HTTPServer.__init__(self, server_address, RequestHandlerClass)
-        threading.Thread.__init__(self)
-        self.setDaemon(True)
-        self.requests = []
-
-    def run(self):
-        self.serve_forever()
-
-    def url(self):
-        return 'http://localhost:%(server_port)s/' % vars(self)
-    url = property(url)
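-
-# Editorial usage sketch, not part of the original module: recording the
-# requests a test makes against MockServer.
-def _example_mock_server_usage():
-    """Start a MockServer, issue one request, and return what it recorded."""
-    server = MockServer()
-    server.start()
-    try:
-        urllib2.urlopen(server.url)
-    except Exception:
-        # The recorder sends only a bare status line; any client-side error
-        # here is irrelevant -- the request has already been recorded.
-        pass
-    return server.requests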
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_bdist_egg.py b/vendor/distribute-0.6.34/setuptools/tests/test_bdist_egg.py
deleted file mode 100644
index 7da122cc31b7a66898c5b98e79425fb6103e3b8c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_bdist_egg.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""develop tests
-"""
-import sys
-import os, re, shutil, tempfile, unittest
-import tempfile
-import site
-from StringIO import StringIO
-
-from distutils.errors import DistutilsError
-from setuptools.command.bdist_egg import bdist_egg
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo', py_modules=['hi'])
-"""
-
-class TestDevelopTest(unittest.TestCase):
-
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-        self.old_cwd = os.getcwd()
-        os.chdir(self.dir)
-        f = open('setup.py', 'w')
-        f.write(SETUP_PY)
-        f.close()
-        f = open('hi.py', 'w')
-        f.write('1\n')
-        f.close()
-        if sys.version >= "2.6":
-            self.old_base = site.USER_BASE
-            site.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = tempfile.mkdtemp()
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
-
-    def test_bdist_egg(self):
-        dist = Distribution(dict(
-            script_name='setup.py',
-            script_args=['bdist_egg'],
-            name='foo',
-            py_modules=['hi']
-            ))
-        os.makedirs(os.path.join('build', 'src'))
-        old_stdout = sys.stdout
-        sys.stdout = o = StringIO()
-        try:
-            dist.parse_command_line()
-            dist.run_commands()
-        finally:
-            sys.stdout = old_stdout
-
-        # let's see if we got our egg at the right place
-        [content] = os.listdir('dist')
-        self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content))
-
-def test_suite():
-    return unittest.makeSuite(TestDevelopTest)
-
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_build_ext.py b/vendor/distribute-0.6.34/setuptools/tests/test_build_ext.py
deleted file mode 100644
index a520ced9d6a32cc1b53e6fb26b1f223a2a71798b..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_build_ext.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""build_ext tests
-"""
-import os, shutil, tempfile, unittest
-from distutils.command.build_ext import build_ext as distutils_build_ext
-from setuptools.command.build_ext import build_ext
-from setuptools.dist import Distribution
-
-class TestBuildExtTest(unittest.TestCase):
-
-    def test_get_ext_filename(self):
-        # setuptools needs to give back the same
-        # result as distutils, even if the fullname
-        # is not in ext_map
-        dist = Distribution()
-        cmd = build_ext(dist)
-        cmd.ext_map['foo/bar'] = ''
-        res = cmd.get_ext_filename('foo')
-        wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
-        assert res == wanted
-
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_develop.py b/vendor/distribute-0.6.34/setuptools/tests/test_develop.py
deleted file mode 100644
index 315058c57523b247f4c707d3c0a5a0797a3d5b84..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_develop.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""develop tests
-"""
-import sys
-import os, shutil, tempfile, unittest
-import tempfile
-import site
-from StringIO import StringIO
-
-from distutils.errors import DistutilsError
-from setuptools.command.develop import develop
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo',
-    packages=['foo'],
-    use_2to3=True,
-)
-"""
-
-INIT_PY = """print "foo"
-"""
-
-class TestDevelopTest(unittest.TestCase):
-
-    def setUp(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
-            return
-
-        # Directory structure
-        self.dir = tempfile.mkdtemp()
-        os.mkdir(os.path.join(self.dir, 'foo'))
-        # setup.py
-        setup = os.path.join(self.dir, 'setup.py')
-        f = open(setup, 'w')
-        f.write(SETUP_PY)
-        f.close()
-        self.old_cwd = os.getcwd()
-        # foo/__init__.py
-        init = os.path.join(self.dir, 'foo', '__init__.py')
-        f = open(init, 'w')
-        f.write(INIT_PY)
-        f.close()
-        
-        os.chdir(self.dir)
-        self.old_base = site.USER_BASE
-        site.USER_BASE = tempfile.mkdtemp()
-        self.old_site = site.USER_SITE
-        site.USER_SITE = tempfile.mkdtemp()
-
-    def tearDown(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
-            return
-        
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        shutil.rmtree(site.USER_BASE)
-        shutil.rmtree(site.USER_SITE)
-        site.USER_BASE = self.old_base
-        site.USER_SITE = self.old_site
-
-    def test_develop(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
-            return
-        dist = Distribution(
-            dict(name='foo',
-                 packages=['foo'],
-                 use_2to3=True,
-                 version='0.0',
-                 ))
-        dist.script_name = 'setup.py'
-        cmd = develop(dist)
-        cmd.user = 1
-        cmd.ensure_finalized()
-        cmd.install_dir = site.USER_SITE
-        cmd.user = 1
-        old_stdout = sys.stdout
-        #sys.stdout = StringIO()
-        try:
-            cmd.run()
-        finally:
-            sys.stdout = old_stdout
-
-        # let's see if we got our egg link at the right place
-        content = os.listdir(site.USER_SITE)
-        content.sort()
-        self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
-
-        # Check that we are using the right code.
-        egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
-        path = egg_link_file.read().split()[0].strip()
-        egg_link_file.close()
-        init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt')
-        init = init_file.read().strip()
-        init_file.close()
-        if sys.version < "3":
-            self.assertEqual(init, 'print "foo"')
-        else:
-            self.assertEqual(init, 'print("foo")')
-
-    def notest_develop_with_setup_requires(self):
-
-        wanted = ("Could not find suitable distribution for "
-                  "Requirement.parse('I-DONT-EXIST')")
-        old_dir = os.getcwd()
-        os.chdir(self.dir)
-        try:
-            try:
-                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
-            except DistutilsError, e:
-                error = str(e)
-                if error ==  wanted:
-                    pass
-        finally:
-            os.chdir(old_dir)
-
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_dist_info.py b/vendor/distribute-0.6.34/setuptools/tests/test_dist_info.py
deleted file mode 100644
index fcb78c36d97de37143b85ceabdba9e9054c756e5..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_dist_info.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""Test .dist-info style distributions.
-"""
-import os
-import shutil
-import tempfile
-import unittest
-import textwrap
-
-try:
-    import ast
-except:
-    pass
-
-import pkg_resources
-
-from setuptools.tests.py26compat import skipIf
-
-def DALS(s):
-    "dedent and left-strip"
-    return textwrap.dedent(s).lstrip()
-
-class TestDistInfo(unittest.TestCase):
-
-    def test_distinfo(self):
-        dists = {}
-        for d in pkg_resources.find_distributions(self.tmpdir):
-            dists[d.project_name] = d
-
-        assert len(dists) == 2, dists
-
-        unversioned = dists['UnversionedDistribution']
-        versioned = dists['VersionedDistribution']
-
-        assert versioned.version == '2.718' # from filename
-        assert unversioned.version == '0.3' # from METADATA
-
-    @skipIf('ast' not in globals(),
-        "ast is used to test conditional dependencies (Python >= 2.6)")
-    def test_conditional_dependencies(self):
-        requires = [pkg_resources.Requirement.parse('splort==4'),
-                    pkg_resources.Requirement.parse('quux>=1.1')]
-
-        for d in pkg_resources.find_distributions(self.tmpdir):
-            self.assertEqual(d.requires(), requires[:1])
-            self.assertEqual(d.requires(extras=('baz',)), requires)
-            self.assertEqual(d.extras, ['baz'])
-
-    def setUp(self):
-        self.tmpdir = tempfile.mkdtemp()
-        versioned = os.path.join(self.tmpdir,
-                                 'VersionedDistribution-2.718.dist-info')
-        os.mkdir(versioned)
-        metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+')
-        metadata_file.write(DALS(
-            """
-            Metadata-Version: 1.2
-            Name: VersionedDistribution
-            Requires-Dist: splort (4)
-            Provides-Extra: baz
-            Requires-Dist: quux (>=1.1); extra == 'baz'
-            """))
-        metadata_file.close()
-
-        unversioned = os.path.join(self.tmpdir,
-                                   'UnversionedDistribution.dist-info')
-        os.mkdir(unversioned)
-        metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+')
-        metadata_file.write(DALS(
-            """
-            Metadata-Version: 1.2
-            Name: UnversionedDistribution
-            Version: 0.3
-            Requires-Dist: splort (==4)
-            Provides-Extra: baz
-            Requires-Dist: quux (>=1.1); extra == 'baz'
-            """))
-        metadata_file.close()
-
-    def tearDown(self):
-        shutil.rmtree(self.tmpdir)
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_easy_install.py b/vendor/distribute-0.6.34/setuptools/tests/test_easy_install.py
deleted file mode 100644
index 582219cef989d7e8b1bfc1d514c1e02482bfc5b5..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_easy_install.py
+++ /dev/null
@@ -1,460 +0,0 @@
-"""Easy install Tests
-"""
-import sys
-import os
-import shutil
-import tempfile
-import unittest
-import site
-import textwrap
-import tarfile
-import urlparse
-import StringIO
-import distutils.core
-
-from setuptools.sandbox import run_setup, SandboxViolation
-from setuptools.command.easy_install import easy_install, fix_jython_executable, get_script_args, main
-from setuptools.command.easy_install import  PthDistributions
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-from pkg_resources import Distribution as PRDistribution
-import setuptools.tests.server
-
-try:
-    # import multiprocessing solely for the purpose of testing its existence
-    __import__('multiprocessing')
-    import logging
-    _LOG = logging.getLogger('test_easy_install')
-    logging.basicConfig(level=logging.INFO, stream=sys.stderr)
-    _MULTIPROC = True
-except ImportError:
-    _MULTIPROC = False
-    _LOG = None
-
-class FakeDist(object):
-    def get_entry_map(self, group):
-        if group != 'console_scripts':
-            return {}
-        return {'name': 'ep'}
-
-    def as_requirement(self):
-        return 'spec'
-
-WANTED = """\
-#!%s
-# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
-__requires__ = 'spec'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
-    sys.exit(
-        load_entry_point('spec', 'console_scripts', 'name')()
-    )
-""" % fix_jython_executable(sys.executable, "")
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestEasyInstallTest(unittest.TestCase):
-
-    def test_install_site_py(self):
-        dist = Distribution()
-        cmd = easy_install(dist)
-        cmd.sitepy_installed = False
-        cmd.install_dir = tempfile.mkdtemp()
-        try:
-            cmd.install_site_py()
-            sitepy = os.path.join(cmd.install_dir, 'site.py')
-            self.assertTrue(os.path.exists(sitepy))
-        finally:
-            shutil.rmtree(cmd.install_dir)
-
-    def test_get_script_args(self):
-        dist = FakeDist()
-
-        old_platform = sys.platform
-        try:
-            name, script = [i for i in get_script_args(dist).next()][0:2]
-        finally:
-            sys.platform = old_platform
-
-        self.assertEqual(script, WANTED)
-
-    def test_no_setup_cfg(self):
-        # makes sure easy_install as a command (main)
-        # doesn't use a setup.cfg file that is located
-        # in the current working directory
-        dir = tempfile.mkdtemp()
-        setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
-        setup_cfg.write('[easy_install]\nfind_links = http://example.com')
-        setup_cfg.close()
-        setup_py = open(os.path.join(dir, 'setup.py'), 'w')
-        setup_py.write(SETUP_PY)
-        setup_py.close()
-
-        from setuptools.dist import Distribution
-
-        def _parse_command_line(self):
-            msg = 'Error: a local setup.cfg was used'
-            opts = self.command_options
-            if 'easy_install' in opts:
-                assert 'find_links' not in opts['easy_install'], msg
-            return self._old_parse_command_line()
-
-        Distribution._old_parse_command_line = Distribution.parse_command_line
-        Distribution.parse_command_line = _parse_command_line
-
-        old_wd = os.getcwd()
-        try:
-            os.chdir(dir)
-            reset_setup_stop_context(
-                lambda: self.assertRaises(SystemExit, main, [])
-            )
-        finally:
-            os.chdir(old_wd)
-            shutil.rmtree(dir)
-            Distribution.parse_command_line = Distribution._old_parse_command_line
-
-    def test_no_find_links(self):
-        # new option '--no-find-links', that blocks find-links added at
-        # the project level
-        dist = Distribution()
-        cmd = easy_install(dist)
-        cmd.check_pth_processing = lambda: True
-        cmd.no_find_links = True
-        cmd.find_links = ['link1', 'link2']
-        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
-        cmd.args = ['ok']
-        cmd.ensure_finalized()
-        self.assertEqual(cmd.package_index.scanned_urls, {})
-
-        # let's try without it (default behavior)
-        cmd = easy_install(dist)
-        cmd.check_pth_processing = lambda: True
-        cmd.find_links = ['link1', 'link2']
-        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
-        cmd.args = ['ok']
-        cmd.ensure_finalized()
-        keys = cmd.package_index.scanned_urls.keys()
-        keys.sort()
-        self.assertEqual(keys, ['link1', 'link2'])
-
-
-class TestPTHFileWriter(unittest.TestCase):
-    def test_add_from_cwd_site_sets_dirty(self):
-        '''a pth file manager should set dirty
-        if a distribution is in site but also the cwd
-        '''
-        pth = PthDistributions('does-not_exist', [os.getcwd()])
-        self.assertTrue(not pth.dirty)
-        pth.add(PRDistribution(os.getcwd()))
-        self.assertTrue(pth.dirty)
-
-    def test_add_from_site_is_ignored(self):
-        if os.name != 'nt':
-            location = '/test/location/does-not-have-to-exist'
-        else:
-            location = 'c:\\does_not_exist'
-        pth = PthDistributions('does-not_exist', [location, ])
-        self.assertTrue(not pth.dirty)
-        pth.add(PRDistribution(location))
-        self.assertTrue(not pth.dirty)
-
-
-class TestUserInstallTest(unittest.TestCase):
-
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-        setup = os.path.join(self.dir, 'setup.py')
-        f = open(setup, 'w')
-        f.write(SETUP_PY)
-        f.close()
-        self.old_cwd = os.getcwd()
-        os.chdir(self.dir)
-        if sys.version >= "2.6":
-            self.old_has_site = easy_install_pkg.HAS_USER_SITE
-            self.old_file = easy_install_pkg.__file__
-            self.old_base = site.USER_BASE
-            site.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = tempfile.mkdtemp()
-            easy_install_pkg.__file__ = site.USER_SITE
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
-            easy_install_pkg.HAS_USER_SITE = self.old_has_site
-            easy_install_pkg.__file__ = self.old_file
-
-    def test_user_install_implied(self):
-        easy_install_pkg.HAS_USER_SITE = True # disabled sometimes
-        #XXX: replace with something meaningful
-        if sys.version < "2.6":
-            return #SKIP
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = easy_install(dist)
-        cmd.args = ['py']
-        cmd.ensure_finalized()
-        self.assertTrue(cmd.user, 'user should be implied')
-
-    def test_multiproc_atexit(self):
-        if not _MULTIPROC:
-            return
-        _LOG.info('this should not break')
-
-    def test_user_install_not_implied_without_usersite_enabled(self):
-        easy_install_pkg.HAS_USER_SITE = False # usually enabled
-        #XXX: replace with something meaningful
-        if sys.version < "2.6":
-            return #SKIP
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = easy_install(dist)
-        cmd.args = ['py']
-        cmd.initialize_options()
-        self.assertFalse(cmd.user, 'NOT user should be implied')
-
-    def test_local_index(self):
-        # make sure the local index is used
-        # when easy_install looks for installed
-        # packages
-        new_location = tempfile.mkdtemp()
-        target = tempfile.mkdtemp()
-        egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
-        f = open(egg_file, 'w')
-        try:
-            f.write('Name: foo\n')
-        finally:
-            f.close()
-
-        sys.path.append(target)
-        old_ppath = os.environ.get('PYTHONPATH')
-        os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path)
-        try:
-            dist = Distribution()
-            dist.script_name = 'setup.py'
-            cmd = easy_install(dist)
-            cmd.install_dir = target
-            cmd.args = ['foo']
-            cmd.ensure_finalized()
-            cmd.local_index.scan([new_location])
-            res = cmd.easy_install('foo')
-            self.assertEqual(os.path.realpath(res.location),
-                             os.path.realpath(new_location))
-        finally:
-            sys.path.remove(target)
-            for basedir in [new_location, target, ]:
-                if not os.path.exists(basedir) or not os.path.isdir(basedir):
-                    continue
-                try:
-                    shutil.rmtree(basedir)
-                except:
-                    pass
-            if old_ppath is not None:
-                os.environ['PYTHONPATH'] = old_ppath
-            else:
-                del os.environ['PYTHONPATH']
-
-    def test_setup_requires(self):
-        """Regression test for issue #318
-
-        Ensures that a package with setup_requires can be installed when
-        distribute is installed in the user site-packages without causing a
-        SandboxViolation.
-        """
-
-        test_setup_attrs = {
-            'name': 'test_pkg', 'version': '0.0',
-            'setup_requires': ['foobar'],
-            'dependency_links': [os.path.abspath(self.dir)]
-        }
-
-        test_pkg = os.path.join(self.dir, 'test_pkg')
-        test_setup_py = os.path.join(test_pkg, 'setup.py')
-        test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
-        os.mkdir(test_pkg)
-
-        f = open(test_setup_py, 'w')
-        f.write(textwrap.dedent("""\
-            import setuptools
-            setuptools.setup(**%r)
-        """ % test_setup_attrs))
-        f.close()
-
-        foobar_path = os.path.join(self.dir, 'foobar-0.1.tar.gz')
-        make_trivial_sdist(
-            foobar_path,
-            textwrap.dedent("""\
-                import setuptools
-                setuptools.setup(
-                    name='foobar',
-                    version='0.1'
-                )
-            """))
-
-        old_stdout = sys.stdout
-        old_stderr = sys.stderr
-        sys.stdout = StringIO.StringIO()
-        sys.stderr = StringIO.StringIO()
-        try:
-            reset_setup_stop_context(
-                lambda: run_setup(test_setup_py, ['install'])
-            )
-        except SandboxViolation:
-            self.fail('Installation caused SandboxViolation')
-        finally:
-            sys.stdout = old_stdout
-            sys.stderr = old_stderr
-
-
-class TestSetupRequires(unittest.TestCase):
-
-    def test_setup_requires_honors_fetch_params(self):
-        """
-        When easy_install installs a source distribution which specifies
-        setup_requires, it should honor the fetch parameters (such as
-        allow-hosts, index-url, and find-links).
-        """
-        # set up a server which will simulate an alternate package index.
-        p_index = setuptools.tests.server.MockServer()
-        p_index.start()
-        netloc = 1
-        p_index_loc = urlparse.urlparse(p_index.url)[netloc]
-        if p_index_loc.endswith(':0'):
-            # Some platforms (Jython) don't find a port to which to bind,
-            #  so skip this test for them.
-            return
-
-        # I realize this is all-but-impossible to read, because it was
-        #  ported from some well-factored, safe code using 'with'. If you
-        #  need to maintain this code, consider making the changes in
-        #  the parent revision (of this comment) and then port the changes
-        #  back for Python 2.4 (or deprecate Python 2.4).
-
-        def install(dist_file):
-            def install_at(temp_install_dir):
-                def install_env():
-                    ei_params = ['--index-url', p_index.url,
-                        '--allow-hosts', p_index_loc,
-                        '--exclude-scripts', '--install-dir', temp_install_dir,
-                        dist_file]
-                    def install_clean_reset():
-                        def install_clean_argv():
-                            # attempt to install the dist. It should fail because
-                            #  it doesn't exist.
-                            self.assertRaises(SystemExit,
-                                easy_install_pkg.main, ei_params)
-                        argv_context(install_clean_argv, ['easy_install'])
-                    reset_setup_stop_context(install_clean_reset)
-                environment_context(install_env, PYTHONPATH=temp_install_dir)
-            tempdir_context(install_at)
-
-        # create an sdist that has a build-time dependency.
-        self.create_sdist(install)
-
-        # there should have been two or three requests to the server
-        #  (three happens on Python 3.3a)
-        self.assertTrue(2 <= len(p_index.requests) <= 3)
-        self.assertEqual(p_index.requests[0].path, '/does-not-exist/')
-
-    def create_sdist(self, installer):
-        """
-        Create an sdist with a setup_requires dependency (of something that
-        doesn't exist) and invoke installer on it.
-        """
-        def build_sdist(dir):
-            dist_path = os.path.join(dir, 'distribute-test-fetcher-1.0.tar.gz')
-            make_trivial_sdist(
-                dist_path,
-                textwrap.dedent("""
-                    import setuptools
-                    setuptools.setup(
-                        name="distribute-test-fetcher",
-                        version="1.0",
-                        setup_requires = ['does-not-exist'],
-                    )
-                """).lstrip())
-            installer(dist_path)
-        tempdir_context(build_sdist)
-
-
-def make_trivial_sdist(dist_path, setup_py):
-    """Create a simple sdist tarball at dist_path, containing just a
-    setup.py, the contents of which are provided by the setup_py string.
-    """
-
-    setup_py_file = tarfile.TarInfo(name='setup.py')
-    try:
-        # Python 3 (StringIO gets converted to io module)
-        MemFile = StringIO.BytesIO
-    except AttributeError:
-        MemFile = StringIO.StringIO
-    setup_py_bytes = MemFile(setup_py.encode('utf-8'))
-    setup_py_file.size = len(setup_py_bytes.getvalue())
-    dist = tarfile.open(dist_path, 'w:gz')
-    try:
-        dist.addfile(setup_py_file, fileobj=setup_py_bytes)
-    finally:
-        dist.close()
-
-
-def tempdir_context(f, cd=lambda dir:None):
-    """
-    Invoke f with a freshly created temporary directory (passed as its
-    argument); the directory is removed afterwards.
-    """
-    temp_dir = tempfile.mkdtemp()
-    orig_dir = os.getcwd()
-    try:
-        cd(temp_dir)
-        f(temp_dir)
-    finally:
-        cd(orig_dir)
-        shutil.rmtree(temp_dir)
-
-def environment_context(f, **updates):
-    """
-    Invoke f with the given environment variable updates applied to
-    os.environ, then restore the previous values.
-    """
-    old_env = os.environ.copy()
-    os.environ.update(updates)
-    try:
-        f()
-    finally:
-        for key in updates:
-            del os.environ[key]
-        os.environ.update(old_env)
-
-def argv_context(f, repl):
-    """
-    Invoke f with sys.argv temporarily replaced by repl.
-    """
-    old_argv = sys.argv[:]
-    sys.argv[:] = repl
-    try:
-        f()
-    finally:
-        sys.argv[:] = old_argv
-
-def reset_setup_stop_context(f):
-    """
-    When the distribute tests are run using setup.py test, and then
-    one wants to invoke another setup() command (such as easy_install)
-    within those tests, it's necessary to reset the global variable
-    in distutils.core so that the setup() command will run naturally.
-    """
-    setup_stop_after = distutils.core._setup_stop_after
-    distutils.core._setup_stop_after = None
-    try:
-        f()
-    finally:
-        distutils.core._setup_stop_after = setup_stop_after
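
The helpers above (tempdir_context, environment_context, argv_context and
reset_setup_stop_context) emulate context managers with nested callbacks so
the suite can still run on Python 2.4. As a rough standard-library-only
sketch (names are illustrative, not part of distribute), the same
temporary-state handling reads more naturally with contextlib on later
Pythons:

import contextlib
import os
import shutil
import sys
import tempfile

@contextlib.contextmanager
def tempdir():
    # create a scratch directory and always remove it afterwards
    temp_dir = tempfile.mkdtemp()
    try:
        yield temp_dir
    finally:
        shutil.rmtree(temp_dir)

@contextlib.contextmanager
def environment(**updates):
    # apply environment overrides, then restore the original os.environ
    old_env = os.environ.copy()
    os.environ.update(updates)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(old_env)

@contextlib.contextmanager
def argv(replacement):
    # temporarily swap sys.argv, e.g. for driving a command-line entry point
    old_argv = sys.argv[:]
    sys.argv[:] = replacement
    try:
        yield
    finally:
        sys.argv[:] = old_argv

# Hypothetical usage mirroring install() above:
# with tempdir() as install_dir, environment(PYTHONPATH=install_dir), argv(['easy_install']):
#     ...
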
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_markerlib.py b/vendor/distribute-0.6.34/setuptools/tests/test_markerlib.py
deleted file mode 100644
index aa461846b641d825258bea056ecf990eeec985c0..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_markerlib.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import os
-import unittest
-from setuptools.tests.py26compat import skipIf
-
-try:
-    import ast
-except ImportError:
-    pass
-
-class TestMarkerlib(unittest.TestCase):
-
-    @skipIf('ast' not in globals(),
-        "ast not available (Python < 2.6?)")
-    def test_markers(self):
-        from _markerlib import interpret, default_environment, compile
-        
-        os_name = os.name
-        
-        self.assertTrue(interpret(""))
-        
-        self.assertTrue(interpret("os.name != 'buuuu'"))
-        self.assertTrue(interpret("python_version > '1.0'"))
-        self.assertTrue(interpret("python_version < '5.0'"))
-        self.assertTrue(interpret("python_version <= '5.0'"))
-        self.assertTrue(interpret("python_version >= '1.0'"))
-        self.assertTrue(interpret("'%s' in os.name" % os_name))
-        self.assertTrue(interpret("'buuuu' not in os.name"))
-        
-        self.assertFalse(interpret("os.name == 'buuuu'"))
-        self.assertFalse(interpret("python_version < '1.0'"))
-        self.assertFalse(interpret("python_version > '5.0'"))
-        self.assertFalse(interpret("python_version >= '5.0'"))
-        self.assertFalse(interpret("python_version <= '1.0'"))
-        self.assertFalse(interpret("'%s' not in os.name" % os_name))
-        self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))    
-        
-        environment = default_environment()
-        environment['extra'] = 'test'
-        self.assertTrue(interpret("extra == 'test'", environment))
-        self.assertFalse(interpret("extra == 'doc'", environment))
-        
-        def raises_nameError():
-            try:
-                interpret("python.version == '42'")
-            except NameError:
-                pass
-            else:
-                raise Exception("Expected NameError")
-        
-        raises_nameError()
-        
-        def raises_syntaxError():
-            try:
-                interpret("(x for x in (4,))")
-            except SyntaxError:
-                pass
-            else:
-                raise Exception("Expected SyntaxError")
-            
-        raises_syntaxError()
-        
-        statement = "python_version == '5'"
-        self.assertEqual(compile(statement).__doc__, statement)
-        
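
The assertions above exercise the interpret, default_environment and compile
entry points of _markerlib. A minimal usage sketch, assuming _markerlib is
importable exactly as in the test:

from _markerlib import interpret, default_environment
from _markerlib import compile as compile_marker   # avoid shadowing the builtin

env = default_environment()
env['extra'] = 'tests'

print(interpret("os.name != 'buuuu'"))      # True on any real platform
print(interpret("extra == 'tests'", env))   # True with the customised environment
marker = compile_marker("python_version >= '1.0'")
print(marker())                             # True; the compiled marker is reusable
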
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_packageindex.py b/vendor/distribute-0.6.34/setuptools/tests/test_packageindex.py
deleted file mode 100644
index 3e446b54d4aabb325648ee7a5eebd6b3e22e6701..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_packageindex.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""Package Index Tests
-"""
-import sys
-import unittest
-import urllib2
-import pkg_resources
-import httplib
-import distutils.errors
-import setuptools.package_index
-from server import IndexServer
-
-class TestPackageIndex(unittest.TestCase):
-
-    def test_bad_url_bad_port(self):
-        index = setuptools.package_index.PackageIndex()
-        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
-        try:
-            v = index.open_url(url)
-        except Exception, v:
-            self.assertTrue(url in str(v))
-        else:
-            self.assertTrue(isinstance(v,urllib2.HTTPError))
-
-    def test_bad_url_typo(self):
-        # issue 16
-        # easy_install inquant.contentmirror.plone breaks because of a typo
-        # in its home URL
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-
-        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
-        try:
-            v = index.open_url(url)
-        except Exception, v:
-            self.assertTrue(url in str(v))
-        else:
-            self.assertTrue(isinstance(v, urllib2.HTTPError))
-
-    def test_bad_url_bad_status_line(self):
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-
-        def _urlopen(*args):
-            import httplib
-            raise httplib.BadStatusLine('line')
-
-        old_urlopen = urllib2.urlopen
-        urllib2.urlopen = _urlopen
-        url = 'http://example.com'
-        try:
-            try:
-                v = index.open_url(url)
-            except Exception, v:
-                self.assertTrue('line' in str(v))
-            else:
-                raise AssertionError('Should have raised here!')
-        finally:
-            urllib2.urlopen = old_urlopen
-
-    def test_bad_url_double_scheme(self):
-        """
-        A bad URL with a double scheme should raise a DistutilsError.
-        """
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-
-        # issue 20
-        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
-        try:
-            index.open_url(url)
-        except distutils.errors.DistutilsError, error:
-            msg = unicode(error)
-            assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
-            return
-        raise RuntimeError("Did not raise")
-
-    def test_bad_url_screwy_href(self):
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-
-        # issue #160
-        if sys.version_info[0] == 2 and sys.version_info[1] == 7:
-            # this should not fail
-            url = 'http://example.com'
-            page = ('<a href="http://www.famfamfam.com]('
-                    'http://www.famfamfam.com/">')
-            index.process_index(url, page)
-
-    def test_url_ok(self):
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-        url = 'file:///tmp/test_package_index'
-        self.assertTrue(index.url_ok(url, True))
-
-    def test_links_priority(self):
-        """
-        Download links from the pypi simple index should be used before
-        external download links.
-        http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
-
-        Use case:
-        - someone uploads a package to PyPI; an md5 is generated
-        - someone manually copies this link (with the md5 in the URL) onto an
-          external page accessible from the package page
-        - someone re-uploads the package (with a different md5)
-        - while easy_installing, an MD5 error occurs because the external link
-          is used
-        -> Distribute should use the link from PyPI, not the external one.
-        """
-        if sys.platform.startswith('java'):
-            # Skip this test on jython because binding to :0 fails
-            return
-
-        # start an index server
-        server = IndexServer()
-        server.start()
-        index_url = server.base_url() + 'test_links_priority/simple/'
-
-        # scan a test index
-        pi = setuptools.package_index.PackageIndex(index_url)
-        requirement = pkg_resources.Requirement.parse('foobar')
-        pi.find_packages(requirement)
-        server.stop()
-
-        # the distribution has been found
-        self.assertTrue('foobar' in pi)
-        # we have only one link, because links are compared without md5
-        self.assertTrue(len(pi['foobar'])==1)
-        # the link should be from the index
-        self.assertTrue('correct_md5' in pi['foobar'][0].location)
-
-    def test_parse_bdist_wininst(self):
-        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
-            'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
-        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
-            'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
-        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
-            'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
-        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
-            'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
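
Both behaviours tested above, filename parsing for bdist_wininst installers
and host filtering on a PackageIndex, fit in a few lines. A sketch assuming
the same setuptools.package_index module:

from setuptools.package_index import PackageIndex, parse_bdist_wininst

print(parse_bdist_wininst('reportlab-2.5.win32-py2.4.exe'))
# ('reportlab-2.5', '2.4', 'win32')

index = PackageIndex(hosts=('www.example.com',))
print(index.url_ok('http://www.example.com/simple/'))   # allowed host -> truthy
print(index.url_ok('http://pypi.example.org/simple/'))  # disallowed -> falsy, warning logged
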
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_resources.py b/vendor/distribute-0.6.34/setuptools/tests/test_resources.py
deleted file mode 100644
index 0bc1a0953dabee3620b93235c4f3109b82b9409a..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_resources.py
+++ /dev/null
@@ -1,659 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
-from unittest import TestCase, makeSuite; from pkg_resources import *
-from setuptools.command.easy_install import get_script_header, is_sh
-import os, pkg_resources, sys, StringIO, tempfile, shutil
-try: frozenset
-except NameError:
-    from sets import ImmutableSet as frozenset
-
-def safe_repr(obj, short=False):
-    """ copied from Python2.7"""
-    try:
-        result = repr(obj)
-    except Exception:
-        result = object.__repr__(obj)
-    if not short or len(result) < _MAX_LENGTH:
-        return result
-    return result[:_MAX_LENGTH] + ' [truncated]...'
-
-class Metadata(EmptyProvider):
-    """Mock object to return metadata as if from an on-disk distribution"""
-
-    def __init__(self,*pairs):
-        self.metadata = dict(pairs)
-
-    def has_metadata(self,name):
-        return name in self.metadata
-
-    def get_metadata(self,name):
-        return self.metadata[name]
-
-    def get_metadata_lines(self,name):
-        return yield_lines(self.get_metadata(name))
-
-class DistroTests(TestCase):
-
-    def testCollection(self):
-        # empty path should produce no distributions
-        ad = Environment([], platform=None, python=None)
-        self.assertEqual(list(ad), [])
-        self.assertEqual(ad['FooPkg'],[])
-        ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
-        ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
-        ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
-
-        # Name is in there now
-        self.assertTrue(ad['FooPkg'])
-        # But only 1 package
-        self.assertEqual(list(ad), ['foopkg'])
-
-        # Distributions sort by version
-        self.assertEqual(
-            [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
-        )
-        # Removing a distribution leaves sequence alone
-        ad.remove(ad['FooPkg'][1])
-        self.assertEqual(
-            [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
-        )
-        # And inserting adds them in order
-        ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
-        self.assertEqual(
-            [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
-        )
-
-        ws = WorkingSet([])
-        foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
-        foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
-        req, = parse_requirements("FooPkg>=1.3")
-
-        # Nominal case: no distros on path, should yield all applicable
-        self.assertEqual(ad.best_match(req,ws).version, '1.9')
-        # If a matching distro is already installed, should return only that
-        ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
-        # If the first matching distro is unsuitable, it's a version conflict
-        ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
-        self.assertRaises(VersionConflict, ad.best_match, req, ws)
-
-        # If more than one match on the path, the first one takes precedence
-        ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
-        self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
-    def checkFooPkg(self,d):
-        self.assertEqual(d.project_name, "FooPkg")
-        self.assertEqual(d.key, "foopkg")
-        self.assertEqual(d.version, "1.3-1")
-        self.assertEqual(d.py_version, "2.4")
-        self.assertEqual(d.platform, "win32")
-        self.assertEqual(d.parsed_version, parse_version("1.3-1"))
-
-    def testDistroBasics(self):
-        d = Distribution(
-            "/some/path",
-            project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
-        )
-        self.checkFooPkg(d)
-
-        d = Distribution("/some/path")
-        self.assertEqual(d.py_version, sys.version[:3])
-        self.assertEqual(d.platform, None)
-
-    def testDistroParse(self):
-        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
-        self.checkFooPkg(d)
-        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
-        self.checkFooPkg(d)
-
-    def testDistroMetadata(self):
-        d = Distribution(
-            "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
-            metadata = Metadata(
-                ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
-            )
-        )
-        self.checkFooPkg(d)
-
-
-    def distRequires(self, txt):
-        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
-
-    def checkRequires(self, dist, txt, extras=()):
-        self.assertEqual(
-            list(dist.requires(extras)),
-            list(parse_requirements(txt))
-        )
-
-    def testDistroDependsSimple(self):
-        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
-            self.checkRequires(self.distRequires(v), v)
-
-
-    def testResolve(self):
-        ad = Environment([]); ws = WorkingSet([])
-        # Resolving no requirements -> nothing to install
-        self.assertEqual( list(ws.resolve([],ad)), [] )
-        # Request something not in the collection -> DistributionNotFound
-        self.assertRaises(
-            DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
-        )
-        Foo = Distribution.from_filename(
-            "/foo_dir/Foo-1.2.egg",
-            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
-        )
-        ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
-
-        # Request thing(s) that are available -> list to activate
-        for i in range(3):
-            targets = list(ws.resolve(parse_requirements("Foo"), ad))
-            self.assertEqual(targets, [Foo])
-            map(ws.add,targets)
-        self.assertRaises(VersionConflict, ws.resolve,
-            parse_requirements("Foo==0.9"), ad)
-        ws = WorkingSet([]) # reset
-
-        # Request an extra that causes an unresolved dependency for "Baz"
-        self.assertRaises(
-            DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
-        )
-        Baz = Distribution.from_filename(
-            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
-        )
-        ad.add(Baz)
-
-        # Activation list now includes resolved dependency
-        self.assertEqual(
-            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
-        )
-        # Requests for conflicting versions produce VersionConflict
-        self.assertRaises( VersionConflict,
-            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
-        )
-
-    def testDistroDependsOptions(self):
-        d = self.distRequires("""
-            Twisted>=1.5
-            [docgen]
-            ZConfig>=2.0
-            docutils>=0.3
-            [fastcgi]
-            fcgiapp>=0.1""")
-        self.checkRequires(d,"Twisted>=1.5")
-        self.checkRequires(
-            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
-        )
-        self.checkRequires(
-            d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
-        )
-        self.checkRequires(
-            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
-            ["docgen","fastcgi"]
-        )
-        self.checkRequires(
-            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
-            ["fastcgi", "docgen"]
-        )
-        self.assertRaises(UnknownExtra, d.requires, ["foo"])
-
-    def testSetuptoolsDistributeCombination(self):
-        # Ensure that installing a 0.7-series setuptools fails.  PJE says that
-        # it will not co-exist.
-        ws = WorkingSet([])
-        d = Distribution(
-            "/some/path",
-            project_name="setuptools",
-            version="0.7a1")
-        self.assertRaises(ValueError, ws.add, d)
-        # A 0.6-series is no problem
-        d2 = Distribution(
-            "/some/path",
-            project_name="setuptools",
-            version="0.6c9")
-        ws.add(d2)
-
-        # a nonexistent version also needs to work
-        ws = WorkingSet([])
-        d3 = Distribution(
-            "/some/path",
-            project_name="setuptools")
-        ws.add(d3)
-
-
-class EntryPointTests(TestCase):
-
-    def assertfields(self, ep):
-        self.assertEqual(ep.name,"foo")
-        self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
-        self.assertEqual(ep.attrs, ("EntryPointTests",))
-        self.assertEqual(ep.extras, ("x",))
-        self.assertTrue(ep.load() is EntryPointTests)
-        self.assertEqual(
-            str(ep),
-            "foo = setuptools.tests.test_resources:EntryPointTests [x]"
-        )
-
-    def setUp(self):
-        self.dist = Distribution.from_filename(
-            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
-
-    def testBasics(self):
-        ep = EntryPoint(
-            "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
-            ["x"], self.dist
-        )
-        self.assertfields(ep)
-
-    def testParse(self):
-        s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
-        ep = EntryPoint.parse(s, self.dist)
-        self.assertfields(ep)
-
-        ep = EntryPoint.parse("bar baz=  spammity[PING]")
-        self.assertEqual(ep.name,"bar baz")
-        self.assertEqual(ep.module_name,"spammity")
-        self.assertEqual(ep.attrs, ())
-        self.assertEqual(ep.extras, ("ping",))
-
-        ep = EntryPoint.parse(" fizzly =  wocka:foo")
-        self.assertEqual(ep.name,"fizzly")
-        self.assertEqual(ep.module_name,"wocka")
-        self.assertEqual(ep.attrs, ("foo",))
-        self.assertEqual(ep.extras, ())
-
-    def testRejects(self):
-        for ep in [
-            "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
-        ]:
-            try: EntryPoint.parse(ep)
-            except ValueError: pass
-            else: raise AssertionError("Should've been bad", ep)
-
-    def checkSubMap(self, m):
-        self.assertEqual(len(m), len(self.submap_expect))
-        for key, ep in self.submap_expect.iteritems():
-            self.assertEqual(repr(m.get(key)), repr(ep))
-
-    submap_expect = dict(
-        feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
-        feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
-        feature3=EntryPoint('feature3', 'this.module', extras=['something'])
-    )
-    submap_str = """
-            # define features for blah blah
-            feature1 = somemodule:somefunction
-            feature2 = another.module:SomeClass [extra1,extra2]
-            feature3 = this.module [something]
-    """
-
-    def testParseList(self):
-        self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
-        self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
-        self.assertRaises(ValueError, EntryPoint.parse_group, "x",
-            ["foo=baz", "foo=bar"])
-
-    def testParseMap(self):
-        m = EntryPoint.parse_map({'xyz':self.submap_str})
-        self.checkSubMap(m['xyz'])
-        self.assertEqual(m.keys(),['xyz'])
-        m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
-        self.checkSubMap(m['xyz'])
-        self.assertEqual(m.keys(),['xyz'])
-        self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
-        self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
-
-class RequirementsTests(TestCase):
-
-    def testBasics(self):
-        r = Requirement.parse("Twisted>=1.2")
-        self.assertEqual(str(r),"Twisted>=1.2")
-        self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
-        self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
-        self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
-        self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
-        self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
-        self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
-        self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
-
-    def testOrdering(self):
-        r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
-        r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
-        self.assertEqual(r1,r2)
-        self.assertEqual(str(r1),str(r2))
-        self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
-
-    def testBasicContains(self):
-        r = Requirement("Twisted", [('>=','1.2')], ())
-        foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
-        twist11  = Distribution.from_filename("Twisted-1.1.egg")
-        twist12  = Distribution.from_filename("Twisted-1.2.egg")
-        self.assertTrue(parse_version('1.2') in r)
-        self.assertTrue(parse_version('1.1') not in r)
-        self.assertTrue('1.2' in r)
-        self.assertTrue('1.1' not in r)
-        self.assertTrue(foo_dist not in r)
-        self.assertTrue(twist11 not in r)
-        self.assertTrue(twist12 in r)
-
-    def testAdvancedContains(self):
-        r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
-        for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
-            self.assertTrue(v in r, (v,r))
-        for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
-            self.assertTrue(v not in r, (v,r))
-
-
-    def testOptionsAndHashing(self):
-        r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
-        r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
-        r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
-        self.assertEqual(r1,r2)
-        self.assertEqual(r1,r3)
-        self.assertEqual(r1.extras, ("foo","bar"))
-        self.assertEqual(r2.extras, ("bar","foo"))  # extras are normalized
-        self.assertEqual(hash(r1), hash(r2))
-        self.assertEqual(
-            hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
-                            frozenset(["foo","bar"])))
-        )
-
-    def testVersionEquality(self):
-        r1 = Requirement.parse("foo==0.3a2")
-        r2 = Requirement.parse("foo!=0.3a4")
-        d = Distribution.from_filename
-
-        self.assertTrue(d("foo-0.3a4.egg") not in r1)
-        self.assertTrue(d("foo-0.3a1.egg") not in r1)
-        self.assertTrue(d("foo-0.3a4.egg") not in r2)
-
-        self.assertTrue(d("foo-0.3a2.egg") in r1)
-        self.assertTrue(d("foo-0.3a2.egg") in r2)
-        self.assertTrue(d("foo-0.3a3.egg") in r2)
-        self.assertTrue(d("foo-0.3a5.egg") in r2)
-
-    def testDistributeSetuptoolsOverride(self):
-        # Plain setuptools or distribute mean we return distribute.
-        self.assertEqual(
-            Requirement.parse('setuptools').project_name, 'distribute')
-        self.assertEqual(
-            Requirement.parse('distribute').project_name, 'distribute')
-        # setuptools lower than 0.7 means distribute
-        self.assertEqual(
-            Requirement.parse('setuptools==0.6c9').project_name, 'distribute')
-        self.assertEqual(
-            Requirement.parse('setuptools==0.6c10').project_name, 'distribute')
-        self.assertEqual(
-            Requirement.parse('setuptools>=0.6').project_name, 'distribute')
-        self.assertEqual(
-            Requirement.parse('setuptools < 0.7').project_name, 'distribute')
-        # setuptools 0.7 and higher means setuptools.
-        self.assertEqual(
-            Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
-        self.assertEqual(
-            Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
-        self.assertEqual(
-            Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
-
-
-
-
-
-
-
-
-
-
-
-class ParseTests(TestCase):
-
-    def testEmptyParse(self):
-        self.assertEqual(list(parse_requirements('')), [])
-
-    def testYielding(self):
-        for inp,out in [
-            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
-            (['x\n\n','y'], ['x','y']),
-        ]:
-            self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
-
-    def testSplitting(self):
-        self.assertEqual(
-            list(
-                pkg_resources.split_sections("""
-                    x
-                    [Y]
-                    z
-
-                    a
-                    [b ]
-                    # foo
-                    c
-                    [ d]
-                    [q]
-                    v
-                    """
-                )
-            ),
-            [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
-        )
-        self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
-
-    def testSafeName(self):
-        self.assertEqual(safe_name("adns-python"), "adns-python")
-        self.assertEqual(safe_name("WSGI Utils"),  "WSGI-Utils")
-        self.assertEqual(safe_name("WSGI  Utils"), "WSGI-Utils")
-        self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
-        self.assertNotEqual(safe_name("peak.web"), "peak-web")
-
-    def testSafeVersion(self):
-        self.assertEqual(safe_version("1.2-1"), "1.2-1")
-        self.assertEqual(safe_version("1.2 alpha"),  "1.2.alpha")
-        self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
-        self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
-        self.assertEqual(safe_version("peak.web"), "peak.web")
-
-    def testSimpleRequirements(self):
-        self.assertEqual(
-            list(parse_requirements('Twis-Ted>=1.2-1')),
-            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
-        )
-        self.assertEqual(
-            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
-            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
-        )
-        self.assertEqual(
-            Requirement.parse("FooBar==1.99a3"),
-            Requirement("FooBar", [('==','1.99a3')], ())
-        )
-        self.assertRaises(ValueError,Requirement.parse,">=2.3")
-        self.assertRaises(ValueError,Requirement.parse,"x\\")
-        self.assertRaises(ValueError,Requirement.parse,"x==2 q")
-        self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
-        self.assertRaises(ValueError,Requirement.parse,"#")
-
-    def testVersionEquality(self):
-        def c(s1,s2):
-            p1, p2 = parse_version(s1),parse_version(s2)
-            self.assertEqual(p1,p2, (s1,s2,p1,p2))
-
-        c('0.4', '0.4.0')
-        c('0.4.0.0', '0.4.0')
-        c('0.4.0-0', '0.4-0')
-        c('0pl1', '0.0pl1')
-        c('0pre1', '0.0c1')
-        c('0.0.0preview1', '0c1')
-        c('0.0c1', '0rc1')
-        c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
-
-    def testVersionOrdering(self):
-        def c(s1,s2):
-            p1, p2 = parse_version(s1),parse_version(s2)
-            self.assertTrue(p1<p2, (s1,s2,p1,p2))
-
-        c('2.1','2.1.1')
-        c('2.1.0','2.10')
-        c('2a1','2b0')
-        c('2b1','2c0')
-        c('2a1','2.1')
-        c('2.3a1', '2.3')
-        c('2.1-1', '2.1-2')
-        c('2.1-1', '2.1.1')
-        c('2.1', '2.1.1-1')
-        c('2.1', '2.1pl4')
-        c('2.1a0-20040501', '2.1')
-        c('1.1', '02.1')
-        c('A56','B27')
-        c('3.2', '3.2.pl0')
-        c('3.2-1', '3.2pl1')
-        c('3.2pl1', '3.2pl1-1')
-        c('0.4', '4.0')
-        c('0.0.4', '0.4.0')
-        c('0pl1', '0.4pl1')
-        c('2.1dev','2.1a0')
-        c('2.1.0rc1','2.1.0')
-        c('2.1.0','2.1.0-rc0')
-        c('2.1.0','2.1.0-a')
-        c('2.1.0','2.1.0-alpha')
-        c('2.1.0','2.1.0-foo')
-        c('1.0','1.0-1')
-        c('1.0-1','1.0.1')
-        c('1.0a','1.0b')
-        c('1.0dev','1.0rc1')
-        c('1.0pre','1.0')
-        c('1.0pre','1.0')
-        c('1.0a','1.0-a')
-        c('1.0rc1','1.0-rc1')
-
-        torture ="""
-        0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
-        0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
-        0.77.2-1 0.77.1-1 0.77.0-1
-        """.split()
-
-        for p,v1 in enumerate(torture):
-            for v2 in torture[p+1:]:
-                c(v2,v1)
-
-
-
-
-
-
-
-
-class ScriptHeaderTests(TestCase):
-    non_ascii_exe = '/Users/José/bin/python'
-
-    def test_get_script_header(self):
-        if not sys.platform.startswith('java') or not is_sh(sys.executable):
-            # This test is for non-Jython platforms
-            self.assertEqual(get_script_header('#!/usr/local/bin/python'),
-                             '#!%s\n' % os.path.normpath(sys.executable))
-            self.assertEqual(get_script_header('#!/usr/bin/python -x'),
-                             '#!%s  -x\n' % os.path.normpath(sys.executable))
-            self.assertEqual(get_script_header('#!/usr/bin/python',
-                                               executable=self.non_ascii_exe),
-                             '#!%s -x\n' % self.non_ascii_exe)
-
-    def test_get_script_header_jython_workaround(self):
-        # This test doesn't work with Python 3 in some locales
-        if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
-            in (None, "C", "POSIX")):
-            return
-
-        class java:
-            class lang:
-                class System:
-                    @staticmethod
-                    def getProperty(property):
-                        return ""
-        sys.modules["java"] = java
-
-        platform = sys.platform
-        sys.platform = 'java1.5.0_13'
-        stdout = sys.stdout
-        try:
-            # A mock sys.executable that uses a shebang line (this file)
-            exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
-            self.assertEqual(
-                get_script_header('#!/usr/local/bin/python', executable=exe),
-                '#!/usr/bin/env %s\n' % exe)
-
-            # Ensure we generate what is basically a broken shebang line
-            # when there are options, with a warning emitted
-            sys.stdout = sys.stderr = StringIO.StringIO()
-            self.assertEqual(get_script_header('#!/usr/bin/python -x',
-                                               executable=exe),
-                             '#!%s  -x\n' % exe)
-            self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
-            sys.stdout = sys.stderr = StringIO.StringIO()
-            self.assertEqual(get_script_header('#!/usr/bin/python',
-                                               executable=self.non_ascii_exe),
-                             '#!%s -x\n' % self.non_ascii_exe)
-            self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
-        finally:
-            del sys.modules["java"]
-            sys.platform = platform
-            sys.stdout = stdout
-
-
-
-
-class NamespaceTests(TestCase):
-
-    def setUp(self):
-        self._ns_pkgs = pkg_resources._namespace_packages.copy()
-        self._tmpdir = tempfile.mkdtemp(prefix="tests-distribute-")
-        os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
-        self._prev_sys_path = sys.path[:]
-        sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
-
-    def tearDown(self):
-        shutil.rmtree(self._tmpdir)
-        pkg_resources._namespace_packages = self._ns_pkgs.copy()
-        sys.path = self._prev_sys_path[:]
-
-    def _assertIn(self, member, container):
-        """ assertIn and assertTrue does not exist in Python2.3"""
-        if member not in container:
-            standardMsg = '%s not found in %s' % (safe_repr(member),
-                                                  safe_repr(container))
-            self.fail(standardMsg)
-
-    def test_two_levels_deep(self):
-        """
-        Test nested namespace packages
-        Create namespace packages in the following tree:
-            site-pkgs/pkg1/pkg2
-            site-pkgs2/pkg1/pkg2
-        Check that both are in the _namespace_packages dict and that their
-        __path__ is correct
-        """
-        sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
-        os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
-        os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
-        ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
-        for site in ["site-pkgs", "site-pkgs2"]:
-            pkg1_init = open(os.path.join(self._tmpdir, site,
-                             "pkg1", "__init__.py"), "w")
-            pkg1_init.write(ns_str)
-            pkg1_init.close()
-            pkg2_init = open(os.path.join(self._tmpdir, site,
-                             "pkg1", "pkg2", "__init__.py"), "w")
-            pkg2_init.write(ns_str)
-            pkg2_init.close()
-        import pkg1
-        self._assertIn("pkg1", pkg_resources._namespace_packages.keys())
-        try:
-            import pkg1.pkg2
-        except ImportError, e:
-            self.fail("Distribute tried to import the parent namespace package")
-        # check the _namespace_packages dict
-        self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys())
-        self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"])
-        # check the __path__ attribute contains both paths
-        self.assertEqual(pkg1.pkg2.__path__, [
-                os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
-                os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2") ])
-
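
The DistroTests, EntryPointTests, RequirementsTests and ParseTests above all
revolve around a small pkg_resources surface: Requirement parsing,
containment checks, and parse_version ordering. A standalone sketch of that
surface, assuming a distribute/setuptools-era pkg_resources:

from pkg_resources import Requirement, parse_version

req = Requirement.parse("Twisted[tls]>=1.2,<3.0")
print(req.project_name)   # 'Twisted'
print(req.key)            # 'twisted' (normalised, case-insensitive key)
print(req.extras)         # ('tls',)
print('2.5' in req)       # True; bare version strings are accepted by __contains__
print(parse_version('1.0a1') < parse_version('1.0'))  # True: pre-releases sort before releases
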
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_sandbox.py b/vendor/distribute-0.6.34/setuptools/tests/test_sandbox.py
deleted file mode 100644
index 1609ee861b3ae958d31fbd58c638d79deb5b1ff0..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_sandbox.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""develop tests
-"""
-import sys
-import os
-import shutil
-import unittest
-import tempfile
-
-from setuptools.sandbox import DirectorySandbox, SandboxViolation
-
-def has_win32com():
-    """
-    Run this to determine if the local machine has win32com, and if it
-    does, include additional tests.
-    """
-    if not sys.platform.startswith('win32'):
-        return False
-    try:
-        mod = __import__('win32com')
-    except ImportError:
-        return False
-    return True
-
-class TestSandbox(unittest.TestCase):
-
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-
-    def tearDown(self):
-        shutil.rmtree(self.dir)
-
-    def test_devnull(self):
-        if sys.version < '2.4':
-            return
-        sandbox = DirectorySandbox(self.dir)
-        sandbox.run(self._file_writer(os.devnull))
-
-    def _file_writer(path):
-        def do_write():
-            f = open(path, 'w')
-            f.write('xxx')
-            f.close()
-        return do_write
-
-    _file_writer = staticmethod(_file_writer)
-
-    if has_win32com():
-        def test_win32com(self):
-            """
-            win32com should not be prevented from caching COM interfaces
-            in gen_py.
-            """
-            import win32com
-            gen_py = win32com.__gen_path__
-            target = os.path.join(gen_py, 'test_write')
-            sandbox = DirectorySandbox(self.dir)
-            try:
-                try:
-                    sandbox.run(self._file_writer(target))
-                except SandboxViolation:
-                    self.fail("Could not create gen_py file due to SandboxViolation")
-            finally:
-                if os.path.exists(target): os.remove(target)
-
-if __name__ == '__main__':
-    unittest.main()
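
DirectorySandbox.run() executes a callable while intercepting writes that
fall outside the sandboxed directory, which is what both tests above depend
on. A hedged sketch (paths are illustrative):

import os
import tempfile
from setuptools.sandbox import DirectorySandbox, SandboxViolation

work_dir = tempfile.mkdtemp()
sandbox = DirectorySandbox(work_dir)

def write_inside():
    # writes below the sandbox directory are permitted
    open(os.path.join(work_dir, 'ok.txt'), 'w').close()

sandbox.run(write_inside)

def write_outside():
    # any write attempt outside the sandbox should be intercepted
    open(os.path.join(os.path.expanduser('~'), 'forbidden-example.txt'), 'w').close()

try:
    sandbox.run(write_outside)
except SandboxViolation as exc:
    print('blocked:', exc)
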
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_sdist.py b/vendor/distribute-0.6.34/setuptools/tests/test_sdist.py
deleted file mode 100644
index a9d5d6e56c188608d74e2a44668712aa8dc4e102..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_sdist.py
+++ /dev/null
@@ -1,383 +0,0 @@
-# -*- coding: utf-8 -*-
-"""sdist tests"""
-
-
-import os
-import shutil
-import sys
-import tempfile
-import unittest
-import urllib
-import unicodedata
-from StringIO import StringIO
-
-
-from setuptools.command.sdist import sdist
-from setuptools.command.egg_info import manifest_maker
-from setuptools.dist import Distribution
-
-
-SETUP_ATTRS = {
-    'name': 'sdist_test',
-    'version': '0.0',
-    'packages': ['sdist_test'],
-    'package_data': {'sdist_test': ['*.txt']}
-}
-
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(**%r)
-""" % SETUP_ATTRS
-
-
-if sys.version_info >= (3,):
-    LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
-else:
-    LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
-
-
-# Cannot use context manager because of Python 2.4
-def quiet():
-    global old_stdout, old_stderr
-    old_stdout, old_stderr = sys.stdout, sys.stderr
-    sys.stdout, sys.stderr = StringIO(), StringIO()
-
-def unquiet():
-    sys.stdout, sys.stderr = old_stdout, old_stderr
-
-
-# Fake byte literals for Python <= 2.5
-def b(s, encoding='utf-8'):
-    if sys.version_info >= (3,):
-        return s.encode(encoding)
-    return s
-
-
-# Convert to POSIX path
-def posix(path):
-    if sys.version_info >= (3,) and not isinstance(path, unicode):
-        return path.replace(os.sep.encode('ascii'), b('/'))
-    else:
-        return path.replace(os.sep, '/')
-
-
-# HFS Plus uses decomposed UTF-8
-def decompose(path):
-    if isinstance(path, unicode):
-        return unicodedata.normalize('NFD', path)
-    try:
-        path = path.decode('utf-8')
-        path = unicodedata.normalize('NFD', path)
-        path = path.encode('utf-8')
-    except UnicodeError:
-        pass # Not UTF-8
-    return path
-
-
-class TestSdistTest(unittest.TestCase):
-
-    def setUp(self):
-        self.temp_dir = tempfile.mkdtemp()
-        f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
-        f.write(SETUP_PY)
-        f.close()
-        # Set up the rest of the test package
-        test_pkg = os.path.join(self.temp_dir, 'sdist_test')
-        os.mkdir(test_pkg)
-        # *.rst was not included in package_data, so c.rst should not be
-        # automatically added to the manifest when not under version control
-        for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
-            # Just touch the files; their contents are irrelevant
-            open(os.path.join(test_pkg, fname), 'w').close()
-
-        self.old_cwd = os.getcwd()
-        os.chdir(self.temp_dir)
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.temp_dir)
-
-    def test_package_data_in_sdist(self):
-        """Regression test for pull request #4: ensures that files listed in
-        package_data are included in the manifest even if they're not added to
-        version control.
-        """
-
-        dist = Distribution(SETUP_ATTRS)
-        dist.script_name = 'setup.py'
-        cmd = sdist(dist)
-        cmd.ensure_finalized()
-
-        # squelch output
-        quiet()
-        try:
-            cmd.run()
-        finally:
-            unquiet()
-
-        manifest = cmd.filelist.files
-        self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
-        self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
-        self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
-
-    def test_manifest_is_written_with_utf8_encoding(self):
-        # Test for #303.
-        dist = Distribution(SETUP_ATTRS)
-        dist.script_name = 'setup.py'
-        mm = manifest_maker(dist)
-        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
-        os.mkdir('sdist_test.egg-info')
-
-        # UTF-8 filename
-        filename = os.path.join('sdist_test', 'smörbröd.py')
-
-        # Add UTF-8 filename and write manifest
-        quiet()
-        try:
-            mm.run()
-            mm.filelist.files.append(filename)
-            mm.write_manifest()
-        finally:
-            unquiet()
-
-        manifest = open(mm.manifest, 'rbU')
-        contents = manifest.read()
-        manifest.close()
-
-        # The manifest should be UTF-8 encoded
-        try:
-            u_contents = contents.decode('UTF-8')
-        except UnicodeDecodeError, e:
-            self.fail(e)
-
-        # The manifest should contain the UTF-8 filename
-        if sys.version_info >= (3,):
-            self.assertTrue(posix(filename) in u_contents)
-        else:
-            self.assertTrue(posix(filename) in contents)
-
-    # Python 3 only
-    if sys.version_info >= (3,):
-
-        def test_write_manifest_allows_utf8_filenames(self):
-            # Test for #303.
-            dist = Distribution(SETUP_ATTRS)
-            dist.script_name = 'setup.py'
-            mm = manifest_maker(dist)
-            mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
-            os.mkdir('sdist_test.egg-info')
-
-            # UTF-8 filename
-            filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
-
-            # Add filename and write manifest
-            quiet()
-            try:
-                mm.run()
-                u_filename = filename.decode('utf-8')
-                mm.filelist.files.append(u_filename)
-                # Re-write manifest
-                mm.write_manifest()
-            finally:
-                unquiet()
-
-            manifest = open(mm.manifest, 'rbU')
-            contents = manifest.read()
-            manifest.close()
-
-            # The manifest should be UTF-8 encoded
-            try:
-                contents.decode('UTF-8')
-            except UnicodeDecodeError, e:
-                self.fail(e)
-
-            # The manifest should contain the UTF-8 filename
-            self.assertTrue(posix(filename) in contents)
-
-            # The filelist should have been updated as well
-            self.assertTrue(u_filename in mm.filelist.files)
-
-        def test_write_manifest_skips_non_utf8_filenames(self):
-            # Test for #303.
-            dist = Distribution(SETUP_ATTRS)
-            dist.script_name = 'setup.py'
-            mm = manifest_maker(dist)
-            mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
-            os.mkdir('sdist_test.egg-info')
-
-            # Latin-1 filename
-            filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
-
-            # Add filename with surrogates and write manifest
-            quiet()
-            try:
-                mm.run()
-                u_filename = filename.decode('utf-8', 'surrogateescape')
-                mm.filelist.files.append(u_filename)
-                # Re-write manifest
-                mm.write_manifest()
-            finally:
-                unquiet()
-
-            manifest = open(mm.manifest, 'rbU')
-            contents = manifest.read()
-            manifest.close()
-
-            # The manifest should be UTF-8 encoded
-            try:
-                contents.decode('UTF-8')
-            except UnicodeDecodeError, e:
-                self.fail(e)
-
-            # The Latin-1 filename should have been skipped
-            self.assertFalse(posix(filename) in contents)
-
-            # The filelist should have been updated as well
-            self.assertFalse(u_filename in mm.filelist.files)
-
-    def test_manifest_is_read_with_utf8_encoding(self):
-        # Test for #303.
-        dist = Distribution(SETUP_ATTRS)
-        dist.script_name = 'setup.py'
-        cmd = sdist(dist)
-        cmd.ensure_finalized()
-
-        # Create manifest
-        quiet()
-        try:
-            cmd.run()
-        finally:
-            unquiet()
-
-        # Add UTF-8 filename to manifest
-        filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
-        cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
-        manifest = open(cmd.manifest, 'ab')
-        manifest.write(b('\n')+filename)
-        manifest.close()
-
-        # The file must exist to be included in the filelist
-        open(filename, 'w').close()
-
-        # Re-read manifest
-        cmd.filelist.files = []
-        quiet()
-        try:
-            cmd.read_manifest()
-        finally:
-            unquiet()
-
-        # The filelist should contain the UTF-8 filename
-        if sys.version_info >= (3,):
-            filename = filename.decode('utf-8')
-        self.assertTrue(filename in cmd.filelist.files)
-
-    # Python 3 only
-    if sys.version_info >= (3,):
-
-        def test_read_manifest_skips_non_utf8_filenames(self):
-            # Test for #303.
-            dist = Distribution(SETUP_ATTRS)
-            dist.script_name = 'setup.py'
-            cmd = sdist(dist)
-            cmd.ensure_finalized()
-
-            # Create manifest
-            quiet()
-            try:
-                cmd.run()
-            finally:
-                unquiet()
-
-            # Add Latin-1 filename to manifest
-            filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
-            cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
-            manifest = open(cmd.manifest, 'ab')
-            manifest.write(b('\n')+filename)
-            manifest.close()
-
-            # The file must exist to be included in the filelist
-            open(filename, 'w').close()
-
-            # Re-read manifest
-            cmd.filelist.files = []
-            quiet()
-            try:
-                try:
-                    cmd.read_manifest()
-                except UnicodeDecodeError, e:
-                    self.fail(e)
-            finally:
-                unquiet()
-
-            # The Latin-1 filename should have been skipped
-            filename = filename.decode('latin-1')
-            self.assertFalse(filename in cmd.filelist.files)
-
-    def test_sdist_with_utf8_encoded_filename(self):
-        # Test for #303.
-        dist = Distribution(SETUP_ATTRS)
-        dist.script_name = 'setup.py'
-        cmd = sdist(dist)
-        cmd.ensure_finalized()
-
-        # UTF-8 filename
-        filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
-        open(filename, 'w').close()
-
-        quiet()
-        try:
-            cmd.run()
-        finally:
-            unquiet()
-
-        if sys.platform == 'darwin':
-            filename = decompose(filename)
-
-        if sys.version_info >= (3,):
-            if sys.platform == 'win32':
-                # Python 3 mangles the UTF-8 filename
-                filename = filename.decode('cp1252')
-                self.assertTrue(filename in cmd.filelist.files)
-            else:
-                filename = filename.decode('utf-8')
-                self.assertTrue(filename in cmd.filelist.files)
-        else:
-            self.assertTrue(filename in cmd.filelist.files)
-
-    def test_sdist_with_latin1_encoded_filename(self):
-        # Test for #303.
-        dist = Distribution(SETUP_ATTRS)
-        dist.script_name = 'setup.py'
-        cmd = sdist(dist)
-        cmd.ensure_finalized()
-
-        # Latin-1 filename
-        filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
-        open(filename, 'w').close()
-
-        quiet()
-        try:
-            cmd.run()
-        finally:
-            unquiet()
-
-        if sys.version_info >= (3,):
-            filename = filename.decode('latin-1')
-            if sys.platform == 'win32':
-                # Latin-1 is similar to Windows-1252
-                self.assertTrue(filename in cmd.filelist.files)
-            else:
-                # The Latin-1 filename should have been skipped
-                self.assertFalse(filename in cmd.filelist.files)
-        else:
-            # No conversion takes place under Python 2 and the file
-            # is included. We shall keep it that way for BBB.
-            self.assertTrue(filename in cmd.filelist.files)
-
-
-def test_suite():
-    return unittest.defaultTestLoader.loadTestsFromName(__name__)
-
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_test.py b/vendor/distribute-0.6.34/setuptools/tests/test_test.py
deleted file mode 100644
index ad7cbd0f9695208923f12912621b42600503e78c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_test.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# -*- coding: UTF-8 -*- 
-
-"""develop tests
-"""
-import sys
-import os, shutil, tempfile, unittest
-import site
-from StringIO import StringIO
-
-from distutils.errors import DistutilsError
-from setuptools.command.test import test
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo',
-    packages=['name', 'name.space', 'name.space.tests'],
-    namespace_packages=['name'],
-    test_suite='name.space.tests.test_suite',
-)
-"""
-
-NS_INIT = """# -*- coding: Latin-1 -*- 
-# Söme Arbiträry Ünicode to test Issüé 310
-try:
-    __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
-    from pkgutil import extend_path
-    __path__ = extend_path(__path__, __name__)
-"""
-# Make sure this is Latin-1 binary, before writing:
-if sys.version_info < (3,):
-    NS_INIT = NS_INIT.decode('UTF-8')
-NS_INIT = NS_INIT.encode('Latin-1')
-
-TEST_PY = """import unittest
-
-class TestTest(unittest.TestCase):
-    def test_test(self):
-        print "Foo" # Should fail under Python 3 unless 2to3 is used
-
-test_suite = unittest.makeSuite(TestTest)
-"""
-
-class TestTestTest(unittest.TestCase):
-
-    def setUp(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
-            return
-
-        # Directory structure
-        self.dir = tempfile.mkdtemp()
-        os.mkdir(os.path.join(self.dir, 'name'))
-        os.mkdir(os.path.join(self.dir, 'name', 'space'))
-        os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests'))
-        # setup.py
-        setup = os.path.join(self.dir, 'setup.py')
-        f = open(setup, 'wt')
-        f.write(SETUP_PY)
-        f.close()
-        self.old_cwd = os.getcwd()
-        # name/__init__.py
-        init = os.path.join(self.dir, 'name', '__init__.py')
-        f = open(init, 'wb')
-        f.write(NS_INIT)
-        f.close()
-        # name/space/__init__.py
-        init = os.path.join(self.dir, 'name', 'space', '__init__.py')
-        f = open(init, 'wt')
-        f.write('#empty\n')
-        f.close()
-        # name/space/tests/__init__.py
-        init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py')
-        f = open(init, 'wt')
-        f.write(TEST_PY)
-        f.close()
-        
-        os.chdir(self.dir)
-        self.old_base = site.USER_BASE
-        site.USER_BASE = tempfile.mkdtemp()
-        self.old_site = site.USER_SITE
-        site.USER_SITE = tempfile.mkdtemp()
-
-    def tearDown(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
-            return
-        
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        shutil.rmtree(site.USER_BASE)
-        shutil.rmtree(site.USER_SITE)
-        site.USER_BASE = self.old_base
-        site.USER_SITE = self.old_site
-
-    def test_test(self):
-        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
-            return
-        
-        dist = Distribution(dict(
-            name='foo',
-            packages=['name', 'name.space', 'name.space.tests'],
-            namespace_packages=['name'],
-            test_suite='name.space.tests.test_suite',
-            use_2to3=True,
-            ))
-        dist.script_name = 'setup.py'
-        cmd = test(dist)
-        cmd.user = 1
-        cmd.ensure_finalized()
-        cmd.install_dir = site.USER_SITE
-        cmd.user = 1
-        old_stdout = sys.stdout
-        sys.stdout = StringIO()
-        try:
-            try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements.
-                cmd.run()
-            except SystemExit: # The test runner calls sys.exit, stop that making an error.
-                pass
-        finally:
-            sys.stdout = old_stdout
-            
\ No newline at end of file
diff --git a/vendor/distribute-0.6.34/setuptools/tests/test_upload_docs.py b/vendor/distribute-0.6.34/setuptools/tests/test_upload_docs.py
deleted file mode 100644
index 769f16cc5a0ca2175d350a99724a4660dc6b17d2..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/test_upload_docs.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""build_ext tests
-"""
-import sys, os, shutil, tempfile, unittest, site, zipfile
-from setuptools.command.upload_docs import upload_docs
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestUploadDocsTest(unittest.TestCase):
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-        setup = os.path.join(self.dir, 'setup.py')
-        f = open(setup, 'w')
-        f.write(SETUP_PY)
-        f.close()
-        self.old_cwd = os.getcwd()
-        os.chdir(self.dir)
-
-        self.upload_dir = os.path.join(self.dir, 'build')
-        os.mkdir(self.upload_dir)
-
-        # A test document.
-        f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
-        f.write("Hello world.")
-        f.close()
-
-        # An empty folder.
-        os.mkdir(os.path.join(self.upload_dir, 'empty'))
-
-        if sys.version >= "2.6":
-            self.old_base = site.USER_BASE
-            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
-
-    def test_create_zipfile(self):
-        # Test to make sure zipfile creation handles common cases.
-        # This explicitly includes a folder containing an empty folder.
-
-        dist = Distribution()
-
-        cmd = upload_docs(dist)
-        cmd.upload_dir = self.upload_dir
-        cmd.target_dir = self.upload_dir
-        tmp_dir = tempfile.mkdtemp()
-        tmp_file = os.path.join(tmp_dir, 'foo.zip')
-        try:
-            zip_file = cmd.create_zipfile(tmp_file)
-
-            assert zipfile.is_zipfile(tmp_file)
-
-            zip_file = zipfile.ZipFile(tmp_file)  # re-open the archive to inspect its contents
-
-            assert zip_file.namelist() == ['index.html']
-
-            zip_file.close()
-        finally:
-            shutil.rmtree(tmp_dir)
-
diff --git a/vendor/distribute-0.6.34/setuptools/tests/win_script_wrapper.txt b/vendor/distribute-0.6.34/setuptools/tests/win_script_wrapper.txt
deleted file mode 100644
index 9f7c81d6b7d34ddda9111567db2a65ddeda0a745..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/setuptools/tests/win_script_wrapper.txt
+++ /dev/null
@@ -1,151 +0,0 @@
-Python Script Wrapper for Windows
-=================================
-
-setuptools includes wrappers for Python scripts that allow them to be
-executed like regular Windows programs.  There are two wrappers, one
-for command-line programs, cli.exe, and one for graphical programs,
-gui.exe.  These programs are almost identical, function pretty much
-the same way, and are generated from the same source file.  The
-wrapper programs are used by copying them to the directory containing
-the script they are to wrap and giving them the same name as the
-script they wrap.  In the rest of this document, we'll give an example
-that illustrates this.
-
-Let's create a simple script, foo-script.py:
-
-    >>> import os, sys, tempfile
-    >>> from setuptools.command.easy_install import nt_quote_arg
-    >>> sample_directory = tempfile.mkdtemp()
-    >>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w')
-    >>> f.write(
-    ... """#!%(python_exe)s
-    ... import sys
-    ... input = repr(sys.stdin.read())
-    ... print sys.argv[0][-14:]
-    ... print sys.argv[1:]
-    ... print input
-    ... if __debug__:
-    ...     print 'non-optimized'
-    ... """ % dict(python_exe=nt_quote_arg(sys.executable)))
-    >>> f.close()
-
-Note that the script starts with a Unix-style '#!' line saying which
-Python executable to run.  The wrapper will use this to find the
-correct Python executable.
-
-We'll also copy cli.exe to the sample-directory with the name foo.exe:
-
-    >>> import pkg_resources
-    >>> f = open(os.path.join(sample_directory, 'foo.exe'), 'wb')
-    >>> f.write(
-    ...     pkg_resources.resource_string('setuptools', 'cli.exe')
-    ...     )
-    >>> f.close()
-
-When the copy of cli.exe, foo.exe in this example, runs, it examines
-the path name it was run with and computes a Python script path name
-by removing the '.exe' suffix and adding the '-script.py' suffix. (For
-GUI programs, the suffix '-script.pyw' is added.)  This is why we
-named our script the way we did.  Now we can run our script by running
-the wrapper:
-
-    >>> import os
-    >>> input, output = os.popen4('"'+nt_quote_arg(os.path.join(sample_directory, 'foo.exe'))
-    ...               + r' arg1 "arg 2" "arg \"2\\\"" "arg 4\\" "arg5 a\\b"')
-    >>> input.write('hello\nworld\n')
-    >>> input.close()
-    >>> print output.read(),
-    \foo-script.py
-    ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
-    'hello\nworld\n'
-    non-optimized
-
-This example was a little pathological in that it exercised Windows
-(MS C runtime) quoting rules:
-
-- Strings containing spaces are surrounded by double quotes.
-
-- Double quotes in strings need to be escaped by preceding them with
-  backslashes.
-
-- One or more backslashes preceding double quotes need to be
-  escaped by preceding each of them with backslashes.
-
-
-Specifying Python Command-line Options
---------------------------------------
-
-You can specify a single argument on the '#!' line.  This can be used
-to specify Python options like -O, to run in optimized mode, or -i,
-to start the interactive interpreter.  You can combine multiple
-options as usual.  For example, to run in optimized mode and
-enter the interpreter after running the script, you could use -Oi:
-
-    >>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w')
-    >>> f.write(
-    ... """#!%(python_exe)s  -Oi  
-    ... import sys
-    ... input = repr(sys.stdin.read())
-    ... print sys.argv[0][-14:]
-    ... print sys.argv[1:]
-    ... print input
-    ... if __debug__:
-    ...     print 'non-optimized'
-    ... sys.ps1 = '---'
-    ... """ % dict(python_exe=nt_quote_arg(sys.executable)))
-    >>> f.close()
-
-    >>> input, output = os.popen4(nt_quote_arg(os.path.join(sample_directory, 'foo.exe')))
-    >>> input.close()
-    >>> print output.read(),
-    \foo-script.py
-    []
-    ''
-    ---
-
-Testing the GUI Version
------------------------
-
-Now let's test the GUI version with the simple script, bar-script.pyw:
-
-    >>> import os, sys, tempfile
-    >>> from setuptools.command.easy_install import nt_quote_arg
-    >>> sample_directory = tempfile.mkdtemp()
-    >>> f = open(os.path.join(sample_directory, 'bar-script.pyw'), 'w')
-    >>> f.write(
-    ... """#!%(python_exe)s
-    ... import sys
-    ... f = open(sys.argv[1], 'wb')
-    ... f.write(repr(sys.argv[2]))
-    ... f.close()
-    ... """ % dict(python_exe=nt_quote_arg(sys.executable)))
-    >>> f.close()
-
-We'll also copy gui.exe to the sample-directory with the name bar.exe:
-
-    >>> import pkg_resources
-    >>> f = open(os.path.join(sample_directory, 'bar.exe'), 'wb')
-    >>> f.write(
-    ...     pkg_resources.resource_string('setuptools', 'gui.exe')
-    ...     )
-    >>> f.close()
-
-Finally, we'll run the script and check the result:
-
-    >>> import os
-    >>> input, output = os.popen4('"'+nt_quote_arg(os.path.join(sample_directory, 'bar.exe'))
-    ...               + r' "%s" "Test Argument"' % os.path.join(sample_directory, 'test_output.txt'))
-    >>> input.close()
-    >>> print output.read()
-    <BLANKLINE>
-    >>> f = open(os.path.join(sample_directory, 'test_output.txt'), 'rb')
-    >>> print f.read()
-    'Test Argument'
-    >>> f.close()
-
-
-We're done with the sample_directory:
-
-    >>> import shutil
-    >>> shutil.rmtree(sample_directory)
-
diff --git a/vendor/distribute-0.6.34/site.py b/vendor/distribute-0.6.34/site.py
deleted file mode 100644
index a7166f1407adc86169b0c13aef44983e0c07d30c..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/site.py
+++ /dev/null
@@ -1,83 +0,0 @@
-def __boot():
-    import sys, os, os.path
-    PYTHONPATH = os.environ.get('PYTHONPATH')
-    if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
-        PYTHONPATH = []
-    else:
-        PYTHONPATH = PYTHONPATH.split(os.pathsep)
-
-    pic = getattr(sys,'path_importer_cache',{})
-    stdpath = sys.path[len(PYTHONPATH):]
-    mydir = os.path.dirname(__file__)
-    #print "searching",stdpath,sys.path
-
-    for item in stdpath:
-        if item==mydir or not item:
-            continue    # skip if current dir. on Windows, or my own directory
-        importer = pic.get(item)
-        if importer is not None:
-            loader = importer.find_module('site')
-            if loader is not None:
-                # This should actually reload the current module
-                loader.load_module('site')
-                break
-        else:
-            try:
-                import imp # Avoid import loop in Python >= 3.3
-                stream, path, descr = imp.find_module('site',[item])
-            except ImportError:
-                continue
-            if stream is None:
-                continue
-            try:
-                # This should actually reload the current module
-                imp.load_module('site',stream,path,descr)
-            finally:
-                stream.close()
-            break
-    else:
-        raise ImportError("Couldn't find the real 'site' module")
-
-    #print "loaded", __file__
-
-    known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
-
-    oldpos = getattr(sys,'__egginsert',0)   # save old insertion position
-    sys.__egginsert = 0                     # and reset the current one
-
-    for item in PYTHONPATH:
-        addsitedir(item)
-
-    sys.__egginsert += oldpos           # restore effective old position
-    
-    d,nd = makepath(stdpath[0])
-    insert_at = None
-    new_path = []
-
-    for item in sys.path:
-        p,np = makepath(item)
-
-        if np==nd and insert_at is None:
-            # We've hit the first 'system' path entry, so added entries go here
-            insert_at = len(new_path)
-
-        if np in known_paths or insert_at is None:
-            new_path.append(item)
-        else:
-            # new path after the insert point, back-insert it
-            new_path.insert(insert_at, item)
-            insert_at += 1
-            
-    sys.path[:] = new_path
-
-if __name__=='site':    
-    __boot()
-    del __boot
-    
-
-
-
-
-
-
-
diff --git a/vendor/distribute-0.6.34/tests/api_tests.txt b/vendor/distribute-0.6.34/tests/api_tests.txt
deleted file mode 100644
index 6cf6e66f27c2252d52812a7a62466a66371fe4a3..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/api_tests.txt
+++ /dev/null
@@ -1,330 +0,0 @@
-Pluggable Distributions of Python Software
-==========================================
-
-Distributions
--------------
-
-A "Distribution" is a collection of files that represent a "Release" of a
-"Project" as of a particular point in time, denoted by a
-"Version"::
-
-    >>> import sys, pkg_resources
-    >>> from pkg_resources import Distribution
-    >>> Distribution(project_name="Foo", version="1.2")
-    Foo 1.2
-
-Distributions have a location, which can be a filename, URL, or really anything
-else you care to use::
-
-    >>> dist = Distribution(
-    ...     location="http://example.com/something",
-    ...     project_name="Bar", version="0.9"
-    ... )
-
-    >>> dist
-    Bar 0.9 (http://example.com/something)
-
-
-Distributions have various introspectable attributes::
-
-    >>> dist.location
-    'http://example.com/something'
-
-    >>> dist.project_name
-    'Bar'
-
-    >>> dist.version
-    '0.9'
-
-    >>> dist.py_version == sys.version[:3]
-    True
-
-    >>> print dist.platform
-    None
-
-Including various computed attributes::
-
-    >>> from pkg_resources import parse_version
-    >>> dist.parsed_version == parse_version(dist.version)
-    True
-
-    >>> dist.key    # case-insensitive form of the project name
-    'bar'
-
-Distributions are compared (and hashed) by version first::
-
-    >>> Distribution(version='1.0') == Distribution(version='1.0')
-    True
-    >>> Distribution(version='1.0') == Distribution(version='1.1')
-    False
-    >>> Distribution(version='1.0') <  Distribution(version='1.1')
-    True
-
-but also by project name (case-insensitive), platform, Python version,
-location, etc.::
-
-    >>> Distribution(project_name="Foo",version="1.0") == \
-    ... Distribution(project_name="Foo",version="1.0")
-    True
-
-    >>> Distribution(project_name="Foo",version="1.0") == \
-    ... Distribution(project_name="foo",version="1.0")
-    True
-
-    >>> Distribution(project_name="Foo",version="1.0") == \
-    ... Distribution(project_name="Foo",version="1.1")
-    False
-
-    >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
-    ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
-    False
-
-    >>> Distribution(location="spam",version="1.0") == \
-    ... Distribution(location="spam",version="1.0")
-    True
-
-    >>> Distribution(location="spam",version="1.0") == \
-    ... Distribution(location="baz",version="1.0")
-    False
-
-
-
-Hash and compare distribution by prio/plat
-
-Get version from metadata
-provider capabilities
-egg_name()
-as_requirement()
-from_location, from_filename (w/path normalization)
-
-Releases may have zero or more "Requirements", which indicate
-what releases of another project the release requires in order to
-function.  A Requirement names the other project, expresses some criteria
-as to what releases of that project are acceptable, and lists any "Extras"
-that the requiring release may need from that project.  (An Extra is an
-optional feature of a Release that can only be used if its additional
-Requirements are satisfied.)
-
-
-
-The Working Set
----------------
-
-A collection of active distributions is called a Working Set.  Note that a
-Working Set can contain any importable distribution, not just pluggable ones.
-For example, the Python standard library is an importable distribution that
-will usually be part of the Working Set, even though it is not pluggable.
-Similarly, when you are doing development work on a project, the files you are
-editing are also a Distribution.  (And, with a little attention to the
-directory names used,  and including some additional metadata, such a
-"development distribution" can be made pluggable as well.)
-
-    >>> from pkg_resources import WorkingSet, VersionConflict
-
-A working set's entries are the sys.path entries that correspond to the active
-distributions.  By default, the working set's entries are the items on
-``sys.path``::
-
-    >>> ws = WorkingSet()
-    >>> ws.entries == sys.path
-    True
-
-But you can also create an empty working set explicitly, and add distributions
-to it::
-
-    >>> ws = WorkingSet([])
-    >>> ws.add(dist)
-    >>> ws.entries
-    ['http://example.com/something']
-    >>> dist in ws
-    True
-    >>> Distribution('foo',version="") in ws
-    False
-
-And you can iterate over its distributions::
-
-    >>> list(ws)
-    [Bar 0.9 (http://example.com/something)]
-
-Adding the same distribution more than once is a no-op::
-
-    >>> ws.add(dist)
-    >>> list(ws)
-    [Bar 0.9 (http://example.com/something)]
-
-For that matter, adding multiple distributions for the same project also does
-nothing, because a working set can only hold one active distribution per
-project -- the first one added to it::
-
-    >>> ws.add(
-    ...     Distribution(
-    ...         'http://example.com/something', project_name="Bar",
-    ...         version="7.2"
-    ...     )
-    ... )
-    >>> list(ws)
-    [Bar 0.9 (http://example.com/something)]
-
-You can append a path entry to a working set using ``add_entry()``::
-
-    >>> ws.entries
-    ['http://example.com/something']
-    >>> ws.add_entry(pkg_resources.__file__)
-    >>> ws.entries == ['http://example.com/something', pkg_resources.__file__]
-    True
-
-Multiple additions result in multiple entries, even if the entry is already in
-the working set (because ``sys.path`` can contain the same entry more than
-once)::
-
-    >>> ws.add_entry(pkg_resources.__file__)
-    >>> ws.entries
-    ['...example.com...', '...pkg_resources...', '...pkg_resources...']
-
-And you can specify the path entry a distribution was found under, using the
-optional second parameter to ``add()``::
-
-    >>> ws = WorkingSet([])
-    >>> ws.add(dist,"foo")
-    >>> ws.entries
-    ['foo']
-
-But even if a distribution is found under multiple path entries, it still only
-shows up once when iterating the working set:
-
-    >>> ws.add_entry(ws.entries[0])
-    >>> list(ws)
-    [Bar 0.9 (http://example.com/something)]
-
-You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
-
-    >>> from pkg_resources import Requirement
-    >>> print ws.find(Requirement.parse("Foo==1.0"))    # no match, return None
-    None
-
-    >>> ws.find(Requirement.parse("Bar==0.9"))  # match, return distribution
-    Bar 0.9 (http://example.com/something)
-
-Note that asking for a conflicting version of a distribution already in a
-working set triggers a ``pkg_resources.VersionConflict`` error:
-
-    >>> try:
-    ...     ws.find(Requirement.parse("Bar==1.0"))
-    ... except VersionConflict:
-    ...     print 'ok'
-    ok
-
-You can subscribe a callback function to receive notifications whenever a new
-distribution is added to a working set.  The callback is immediately invoked
-once for each existing distribution in the working set, and then is called
-again for new distributions added thereafter::
-
-    >>> def added(dist): print "Added", dist
-    >>> ws.subscribe(added)
-    Added Bar 0.9
-    >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12") 
-    >>> ws.add(foo12)
-    Added Foo 1.2
-
-Note, however, that only the first distribution added for a given project name
-will trigger a callback, even during the initial ``subscribe()`` callback::
-
-    >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14") 
-    >>> ws.add(foo14)   # no callback, because Foo 1.2 is already active
-
-    >>> ws = WorkingSet([])
-    >>> ws.add(foo12)
-    >>> ws.add(foo14)
-    >>> ws.subscribe(added)
-    Added Foo 1.2
-    
-And adding a callback more than once has no effect, either::
-
-    >>> ws.subscribe(added)     # no callbacks
-
-    # and no double-callbacks on subsequent additions, either
-    >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
-    >>> ws.add(just_a_test)
-    Added JustATest 0.99
-
-
-Finding Plugins
----------------
-
-``WorkingSet`` objects can be used to figure out what plugins in an
-``Environment`` can be loaded without any resolution errors::
-
-    >>> from pkg_resources import Environment
-
-    >>> plugins = Environment([])   # normally, a list of plugin directories
-    >>> plugins.add(foo12)
-    >>> plugins.add(foo14)
-    >>> plugins.add(just_a_test)
-    
-In the simplest case, we just get the newest version of each distribution in
-the plugin environment::
-
-    >>> ws = WorkingSet([])
-    >>> ws.find_plugins(plugins)
-    ([JustATest 0.99, Foo 1.4 (f14)], {})
-
-But if there's a problem with a version conflict or missing requirements, the
-method falls back to older versions, and the error info dict will contain an
-exception instance for each unloadable plugin::
-
-    >>> ws.add(foo12)   # this will conflict with Foo 1.4
-    >>> ws.find_plugins(plugins)
-    ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
-
-But if you disallow fallbacks, the failed plugin will be skipped instead of
-trying older versions::
-
-    >>> ws.find_plugins(plugins, fallback=False)
-    ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
-
-
-
-Platform Compatibility Rules
-----------------------------
-
-On the Mac, there are potential compatibility issues for modules compiled
-on newer versions of Mac OS X than what the user is running. Additionally,
-Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
-
-Basic equality works as on other platforms::
-
-    >>> from pkg_resources import compatible_platforms as cp
-    >>> reqd = 'macosx-10.4-ppc'
-    >>> cp(reqd, reqd)
-    True
-    >>> cp("win32", reqd)
-    False
-
-Distributions made on other machine types are not compatible::
-
-    >>> cp("macosx-10.4-i386", reqd)
-    False
-
-Distributions made on earlier versions of the OS are compatible, as
-long as they are from the same top-level version. The patchlevel version
-number does not matter::
-
-    >>> cp("macosx-10.4-ppc", reqd)
-    True
-    >>> cp("macosx-10.3-ppc", reqd)
-    True
-    >>> cp("macosx-10.5-ppc", reqd)
-    False
-    >>> cp("macosx-9.5-ppc", reqd)
-    False
-
-Backwards compatibility for packages made via earlier versions of 
-setuptools is provided as well::
-
-    >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
-    True
-    >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
-    True
-    >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
-    False
-
diff --git a/vendor/distribute-0.6.34/tests/install_test.py b/vendor/distribute-0.6.34/tests/install_test.py
deleted file mode 100644
index 02deb81860c8d0c218157c79d3f357b73b671e77..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/install_test.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import urllib2
-import sys
-import os
-
-if os.path.exists('distribute_setup.py'):
-    print 'distribute_setup.py exists in the current dir, aborting'
-    sys.exit(2)
-
-print '**** Starting Test'
-print '\n\n'
-
-is_jython = sys.platform.startswith('java')
-if is_jython:
-    import subprocess
-
-print 'Downloading bootstrap'
-file = urllib2.urlopen('http://nightly.ziade.org/distribute_setup.py')
-f = open('distribute_setup.py', 'w')
-f.write(file.read())
-f.close()
-
-# running it
-args = [sys.executable]  + ['distribute_setup.py']
-if is_jython:
-    res = subprocess.call(args)
-else:
-    res = os.spawnv(os.P_WAIT, sys.executable, args)
-
-if res != 0:
-    print '**** Test failed, please send me the output at tarek@ziade.org'
-    os.remove('distribute_setup.py')
-    sys.exit(2)
-
-# now checking if Distribute is installed
-script = """\
-import sys
-try:
-    import setuptools
-except ImportError:
-    sys.exit(0)
-
-sys.exit(hasattr(setuptools, "_distribute"))
-"""
-
-root = 'script'
-seed = 0
-script_name = '%s%d.py' % (root, seed)
-
-while os.path.exists(script_name):
-    seed += 1
-    script_name = '%s%d.py' % (root, seed)
-
-f = open(script_name, 'w')
-try:
-    f.write(script)
-finally:
-    f.close()
-
-try:
-    args = [sys.executable]  + [script_name]
-    if is_jython:
-        res = subprocess.call(args)
-    else:
-        res = os.spawnv(os.P_WAIT, sys.executable, args)
-
-    print '\n\n'
-    if res:
-        print '**** Test is OK'
-    else:
-        print '**** Test failed, please send me the output at tarek@ziade.org'
-finally:
-    if os.path.exists(script_name):
-        os.remove(script_name)
-    os.remove('distribute_setup.py')
-
diff --git a/vendor/distribute-0.6.34/tests/manual_test.py b/vendor/distribute-0.6.34/tests/manual_test.py
deleted file mode 100644
index 0d5051f1656e7a451a6da7f6a4f5503d03e1b81f..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/manual_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-import sys
-
-if sys.version_info[0] >= 3:
-    raise NotImplementedError('Py3 not supported in this test yet')
-
-import os
-import shutil
-import tempfile
-from distutils.command.install import INSTALL_SCHEMES
-from string import Template
-from urllib2 import urlopen
-
-try:
-    import subprocess
-    def _system_call(*args):
-        assert subprocess.call(args) == 0
-except ImportError:
-    # Python 2.3
-    def _system_call(*args):
-        # quoting arguments if windows
-        if sys.platform == 'win32':
-            def quote(arg):
-                if ' ' in arg:
-                    return '"%s"' % arg
-                return arg
-            args = [quote(arg) for arg in args]
-        assert os.system(' '.join(args)) == 0
-
-def tempdir(func):
-    def _tempdir(*args, **kwargs):
-        test_dir = tempfile.mkdtemp()
-        old_dir = os.getcwd()
-        os.chdir(test_dir)
-        try:
-            return func(*args, **kwargs)
-        finally:
-            os.chdir(old_dir)
-            shutil.rmtree(test_dir)
-    return _tempdir
-
-SIMPLE_BUILDOUT = """\
-[buildout]
-
-parts = eggs
-
-[eggs]
-recipe = zc.recipe.egg
-
-eggs =
-    extensions
-"""
-
-BOOTSTRAP = 'http://python-distribute.org/bootstrap.py'
-PYVER = sys.version.split()[0][:3]
-DEV_URL = 'http://bitbucket.org/tarek/distribute/get/0.6-maintenance.zip#egg=distribute-dev'
-
-_VARS = {'base': '.',
-         'py_version_short': PYVER}
-
-if sys.platform == 'win32':
-    PURELIB = INSTALL_SCHEMES['nt']['purelib']
-else:
-    PURELIB = INSTALL_SCHEMES['unix_prefix']['purelib']
-
-
-@tempdir
-def test_virtualenv():
-    """virtualenv with distribute"""
-    purelib = os.path.abspath(Template(PURELIB).substitute(**_VARS))
-    _system_call('virtualenv', '--no-site-packages', '.', '--distribute')
-    _system_call('bin/easy_install', 'distribute==dev')
-    # linux specific
-    site_pkg = os.listdir(purelib)
-    site_pkg.sort()
-    assert 'distribute' in site_pkg[0]
-    easy_install = os.path.join(purelib, 'easy-install.pth')
-    with open(easy_install) as f:
-        res = f.read()
-    assert 'distribute' in res
-    assert 'setuptools' not in res
-
-@tempdir
-def test_full():
-    """virtualenv + pip + buildout"""
-    _system_call('virtualenv', '--no-site-packages', '.')
-    _system_call('bin/easy_install', '-q', 'distribute==dev')
-    _system_call('bin/easy_install', '-qU', 'distribute==dev')
-    _system_call('bin/easy_install', '-q', 'pip')
-    _system_call('bin/pip', 'install', '-q', 'zc.buildout')
-
-    with open('buildout.cfg', 'w') as f:
-        f.write(SIMPLE_BUILDOUT)
-
-    with open('bootstrap.py', 'w') as f:
-        f.write(urlopen(BOOTSTRAP).read())
-
-    _system_call('bin/python', 'bootstrap.py', '--distribute')
-    _system_call('bin/buildout', '-q')
-    eggs = os.listdir('eggs')
-    eggs.sort()
-    assert len(eggs) == 3
-    assert eggs[0].startswith('distribute')
-    assert eggs[1:] == ['extensions-0.3-py2.6.egg',
-                        'zc.recipe.egg-1.2.2-py2.6.egg']
-
-if __name__ == '__main__':
-    test_virtualenv()
-    test_full()
-
diff --git a/vendor/distribute-0.6.34/tests/shlib_test/hello.c b/vendor/distribute-0.6.34/tests/shlib_test/hello.c
deleted file mode 100644
index 9998372ccd4bdd2b0a9e4dc8b9af0ec3a319e423..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/shlib_test/hello.c
+++ /dev/null
@@ -1,168 +0,0 @@
-/* Generated by Pyrex 0.9.3 on Thu Jan 05 17:47:12 2006 */
-
-#include "Python.h"
-#include "structmember.h"
-#ifndef PY_LONG_LONG
-  #define PY_LONG_LONG LONG_LONG
-#endif
-
-
-typedef struct {PyObject **p; char *s;} __Pyx_InternTabEntry; /*proto*/
-typedef struct {PyObject **p; char *s; long n;} __Pyx_StringTabEntry; /*proto*/
-static PyObject *__Pyx_UnpackItem(PyObject *, int); /*proto*/
-static int __Pyx_EndUnpack(PyObject *, int); /*proto*/
-static int __Pyx_PrintItem(PyObject *); /*proto*/
-static int __Pyx_PrintNewline(void); /*proto*/
-static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
-static void __Pyx_ReRaise(void); /*proto*/
-static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/
-static PyObject *__Pyx_GetExcValue(void); /*proto*/
-static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name); /*proto*/
-static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/
-static int __Pyx_GetStarArgs(PyObject **args, PyObject **kwds, char *kwd_list[], int nargs, PyObject **args2, PyObject **kwds2); /*proto*/
-static void __Pyx_WriteUnraisable(char *name); /*proto*/
-static void __Pyx_AddTraceback(char *funcname); /*proto*/
-static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size);  /*proto*/
-static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
-static int __Pyx_GetVtable(PyObject *dict, void *vtabptr); /*proto*/
-static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, char *modname); /*proto*/
-static int __Pyx_InternStrings(__Pyx_InternTabEntry *t); /*proto*/
-static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
-static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
-
-static PyObject *__pyx_m;
-static PyObject *__pyx_b;
-static int __pyx_lineno;
-static char *__pyx_filename;
-staticforward char **__pyx_f;
-
-/* Declarations from hello */
-
-char (*(get_hello_msg(void))); /*proto*/
-
-/* Implementation of hello */
-
-static PyObject *__pyx_n_hello;
-
-static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
-static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
-  PyObject *__pyx_r;
-  PyObject *__pyx_1 = 0;
-  static char *__pyx_argnames[] = {0};
-  if (!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "", __pyx_argnames)) return 0;
-
-  /* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":4 */
-  __pyx_1 = PyString_FromString(get_hello_msg()); if (!__pyx_1) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; goto __pyx_L1;}
-  __pyx_r = __pyx_1;
-  __pyx_1 = 0;
-  goto __pyx_L0;
-
-  __pyx_r = Py_None; Py_INCREF(__pyx_r);
-  goto __pyx_L0;
-  __pyx_L1:;
-  Py_XDECREF(__pyx_1);
-  __Pyx_AddTraceback("hello.hello");
-  __pyx_r = 0;
-  __pyx_L0:;
-  return __pyx_r;
-}
-
-static __Pyx_InternTabEntry __pyx_intern_tab[] = {
-  {&__pyx_n_hello, "hello"},
-  {0, 0}
-};
-
-static struct PyMethodDef __pyx_methods[] = {
-  {"hello", (PyCFunction)__pyx_f_5hello_hello, METH_VARARGS|METH_KEYWORDS, 0},
-  {0, 0, 0, 0}
-};
-
-DL_EXPORT(void) inithello(void); /*proto*/
-DL_EXPORT(void) inithello(void) {
-  __pyx_m = Py_InitModule4("hello", __pyx_methods, 0, 0, PYTHON_API_VERSION);
-  if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
-  __pyx_b = PyImport_AddModule("__builtin__");
-  if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
-  if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
-  if (__Pyx_InternStrings(__pyx_intern_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
-
-  /* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":3 */
-  return;
-  __pyx_L1:;
-  __Pyx_AddTraceback("hello");
-}
-
-static char *__pyx_filenames[] = {
-  "hello.pyx",
-};
-statichere char **__pyx_f = __pyx_filenames;
-
-/* Runtime support code */
-
-static int __Pyx_InternStrings(__Pyx_InternTabEntry *t) {
-    while (t->p) {
-        *t->p = PyString_InternFromString(t->s);
-        if (!*t->p)
-            return -1;
-        ++t;
-    }
-    return 0;
-}
-
-#include "compile.h"
-#include "frameobject.h"
-#include "traceback.h"
-
-static void __Pyx_AddTraceback(char *funcname) {
-    PyObject *py_srcfile = 0;
-    PyObject *py_funcname = 0;
-    PyObject *py_globals = 0;
-    PyObject *empty_tuple = 0;
-    PyObject *empty_string = 0;
-    PyCodeObject *py_code = 0;
-    PyFrameObject *py_frame = 0;
-    
-    py_srcfile = PyString_FromString(__pyx_filename);
-    if (!py_srcfile) goto bad;
-    py_funcname = PyString_FromString(funcname);
-    if (!py_funcname) goto bad;
-    py_globals = PyModule_GetDict(__pyx_m);
-    if (!py_globals) goto bad;
-    empty_tuple = PyTuple_New(0);
-    if (!empty_tuple) goto bad;
-    empty_string = PyString_FromString("");
-    if (!empty_string) goto bad;
-    py_code = PyCode_New(
-        0,            /*int argcount,*/
-        0,            /*int nlocals,*/
-        0,            /*int stacksize,*/
-        0,            /*int flags,*/
-        empty_string, /*PyObject *code,*/
-        empty_tuple,  /*PyObject *consts,*/
-        empty_tuple,  /*PyObject *names,*/
-        empty_tuple,  /*PyObject *varnames,*/
-        empty_tuple,  /*PyObject *freevars,*/
-        empty_tuple,  /*PyObject *cellvars,*/
-        py_srcfile,   /*PyObject *filename,*/
-        py_funcname,  /*PyObject *name,*/
-        __pyx_lineno,   /*int firstlineno,*/
-        empty_string  /*PyObject *lnotab*/
-    );
-    if (!py_code) goto bad;
-    py_frame = PyFrame_New(
-        PyThreadState_Get(), /*PyThreadState *tstate,*/
-        py_code,             /*PyCodeObject *code,*/
-        py_globals,          /*PyObject *globals,*/
-        0                    /*PyObject *locals*/
-    );
-    if (!py_frame) goto bad;
-    py_frame->f_lineno = __pyx_lineno;
-    PyTraceBack_Here(py_frame);
-bad:
-    Py_XDECREF(py_srcfile);
-    Py_XDECREF(py_funcname);
-    Py_XDECREF(empty_tuple);
-    Py_XDECREF(empty_string);
-    Py_XDECREF(py_code);
-    Py_XDECREF(py_frame);
-}
diff --git a/vendor/distribute-0.6.34/tests/shlib_test/hello.pyx b/vendor/distribute-0.6.34/tests/shlib_test/hello.pyx
deleted file mode 100644
index 58ce6919a2b6e12ae30b6f238b9fb4b50c56a517..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/shlib_test/hello.pyx
+++ /dev/null
@@ -1,4 +0,0 @@
-cdef extern char *get_hello_msg()
-
-def hello():
-    return get_hello_msg()
diff --git a/vendor/distribute-0.6.34/tests/shlib_test/hellolib.c b/vendor/distribute-0.6.34/tests/shlib_test/hellolib.c
deleted file mode 100644
index 88d65cee923688fbd2293aa1d0f62df13febf7b3..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/shlib_test/hellolib.c
+++ /dev/null
@@ -1,3 +0,0 @@
-extern char* get_hello_msg() {
-    return "Hello, world!";
-}
diff --git a/vendor/distribute-0.6.34/tests/shlib_test/setup.py b/vendor/distribute-0.6.34/tests/shlib_test/setup.py
deleted file mode 100644
index b0c93996f3265bdef0eb5b746de6ca1e96052a90..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/shlib_test/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from setuptools import setup, Extension, Library
-
-setup(
-    name="shlib_test",
-    ext_modules = [
-        Library("hellolib", ["hellolib.c"]),
-        Extension("hello", ["hello.pyx"], libraries=["hellolib"])
-    ],
-    test_suite="test_hello.HelloWorldTest",
-)
diff --git a/vendor/distribute-0.6.34/tests/shlib_test/test_hello.py b/vendor/distribute-0.6.34/tests/shlib_test/test_hello.py
deleted file mode 100644
index 6da02e31d450ea669727d47e34c1d624cb97ce2b..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/shlib_test/test_hello.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from unittest import TestCase
-
-class HelloWorldTest(TestCase):
-    def testHelloMsg(self):
-        from hello import hello
-        self.assertEqual(hello(), "Hello, world!")
-
diff --git a/vendor/distribute-0.6.34/tests/test_distribute_setup.py b/vendor/distribute-0.6.34/tests/test_distribute_setup.py
deleted file mode 100644
index 1f3da058d14bea737ec5776841d2ce43b1344c4e..0000000000000000000000000000000000000000
--- a/vendor/distribute-0.6.34/tests/test_distribute_setup.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import sys
-import os
-import tempfile
-import unittest
-import shutil
-import copy
-
-CURDIR = os.path.abspath(os.path.dirname(__file__))
-TOPDIR = os.path.split(CURDIR)[0]
-sys.path.insert(0, TOPDIR)
-
-from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
-                              _do_download, _install, DEFAULT_URL,
-                              DEFAULT_VERSION)
-import distribute_setup
-
-class TestSetup(unittest.TestCase):
-
-    def urlopen(self, url):
-        return open(self.tarball)
-
-    def setUp(self):
-        self.old_sys_path = copy.copy(sys.path)
-        self.cwd = os.getcwd()
-        self.tmpdir = tempfile.mkdtemp()
-        os.chdir(TOPDIR)
-        _python_cmd("setup.py", "-q", "egg_info", "-RDb", "''", "sdist",
-                    "--dist-dir", "%s" % self.tmpdir)
-        tarball = os.listdir(self.tmpdir)[0]
-        self.tarball = os.path.join(self.tmpdir, tarball)
-        import urllib2
-        urllib2.urlopen = self.urlopen
-
-    def tearDown(self):
-        shutil.rmtree(self.tmpdir)
-        os.chdir(self.cwd)
-        sys.path = copy.copy(self.old_sys_path)
-
-    def test_build_egg(self):
-        # making it an egg
-        egg = _build_egg(self.tarball, self.tmpdir)
-
-        # now trying to import it
-        sys.path[0] = egg
-        import setuptools
-        self.assertTrue(setuptools.__file__.startswith(egg))
-
-    def test_do_download(self):
-        tmpdir = tempfile.mkdtemp()
-        _do_download(DEFAULT_VERSION, DEFAULT_URL, tmpdir, 1)
-        import setuptools
-        self.assertTrue(setuptools.bootstrap_install_from.startswith(tmpdir))
-
-    def test_install(self):
-        def _faked(*args):
-            return True
-        distribute_setup.python_cmd = _faked
-        _install(self.tarball)
-
-    def test_use_setuptools(self):
-        self.assertEqual(use_setuptools(), None)
-
-        # make sure fake_setuptools is not called by default
-        import pkg_resources
-        del pkg_resources._distribute
-        def fake_setuptools(*args):
-            raise AssertionError
-
-        pkg_resources._fake_setuptools = fake_setuptools
-        use_setuptools()
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/vendor/pip-1.2.1.tar.gz b/vendor/pip-1.2.1.tar.gz
deleted file mode 100644
index 462c16b5f8c20af6d0c9485be56c038a8780e6c8..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1.tar.gz and /dev/null differ
diff --git a/vendor/pip-1.2.1/.gitignore b/vendor/pip-1.2.1/.gitignore
deleted file mode 100644
index 0fe94900b6bc216daf28865cf1231a117f3efa0d..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/.gitignore
+++ /dev/null
@@ -1,23 +0,0 @@
-MANIFEST
-tests/test-scratch/*
-tests/test-cache/*
-tests/packages/FSPkg/FSPkg.egg-info
-testenv
-pip.egg-info/*
-ScriptTest-*.egg
-virtualenv-*.egg
-nose-*.egg/*
-wsgi_intercept-*.egg/*
-WSGIProxy-*.egg/*
-WebOb-*.egg
-Paste-*.egg/*
-mock-*egg
-tests/tests_cache/*
-dist/*
-docs/_build/*
-build/*
-*.pyc
-*.pyo 
-pip-log.txt
-pip.log
-*.~
diff --git a/vendor/pip-1.2.1/AUTHORS.txt b/vendor/pip-1.2.1/AUTHORS.txt
deleted file mode 100644
index 5fa57113de4717dd0fcdb4f21f865d6a8b41ce8f..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/AUTHORS.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-Alex Grönholm
-Alex Morega
-Alexandre Conrad
-Antti Kaihola
-Armin Ronacher
-Brian Rosner
-Carl Meyer
-Christian Oudard
-Cody Soyland
-Daniel Holth
-Dave Abrahams
-Francesco
-Hugo Lopes Tavares
-Ian Bicking
-Igor Sobreira
-Ionel Maries Cristian
-Jakub Vysoky
-Jannis Leidel
-Jay Graves
-John-Scott Atlakson
-Jon Parise
-Josh Bronson
-Kelsey Hightower
-Kenneth Belitzky
-Kumar McMillan
-Luke Macken
-Masklinn
-Marc Abramowitz
-Marcus Smith
-Matt Maker
-Nowell Strite
-Oliver Tonnhofer
-Olivier Girardot
-Patrick Jenkins
-Paul Nasrat
-Paul Oswald
-Paul van der Linden
-Peter Waller
-Piet Delport
-Qiangning Hong
-Rene Dudfield
-Ronny Pfannschmidt
-Simon Cross
-Stavros Korokithakis
-Thomas Johansson
-Vinay Sajip
-Vitaly Babiy
-Wil Tan
diff --git a/vendor/pip-1.2.1/LICENSE.txt b/vendor/pip-1.2.1/LICENSE.txt
deleted file mode 100644
index 7951a03258d8a8928d5c049ef3399c396a338a85..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/LICENSE.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright (c) 2008-2011 The pip developers (see AUTHORS.txt file)
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/pip-1.2.1/MANIFEST.in b/vendor/pip-1.2.1/MANIFEST.in
deleted file mode 100644
index add9bf16054afe511d3beabddc25702a52c85ba8..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/MANIFEST.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include AUTHORS.txt
-include LICENSE.txt
-recursive-include docs *.txt
-recursive-include docs *.html
-recursive-exclude docs/_build *.txt
-prune docs/_build/_sources
diff --git a/vendor/pip-1.2.1/contrib/build-installer b/vendor/pip-1.2.1/contrib/build-installer
deleted file mode 100755
index 8a0e1af261ae4ebf213b2d785064c51e7a099fc9..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/contrib/build-installer
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-from packager import generate_script
-
-here = os.path.dirname(os.path.abspath(__file__))
-file_name = os.path.join(here, 'get-pip.py')
-
-entry = """
-import sys
-try:
-    import setuptools
-    import pkg_resources
-except ImportError:
-    raise SystemExit("An error occured while trying to run %s. Make sure "
-                     "you have setuptools or distribute installed." % __file__)
-import pip
-pip.bootstrap()
-"""
-
-def main():
-    sys.stdout.write("Creating pip bootstrapper...")
-    script = generate_script(entry, ['pip'])
-    f = open(file_name, 'w')
-    try:
-        f.write(script)
-    finally:
-        f.close()
-    sys.stdout.write('done.\n')
-    if hasattr(os, 'chmod'):
-        oldmode = os.stat(file_name).st_mode & 07777
-        newmode = (oldmode | 0555) & 07777
-        os.chmod(file_name, newmode)
-        sys.stdout.write('Made resulting file %s executable.\n\n' % file_name)
-
-if __name__ == '__main__':
-    main()
diff --git a/vendor/pip-1.2.1/contrib/build-standalone b/vendor/pip-1.2.1/contrib/build-standalone
deleted file mode 100755
index b2a1aebe185d0c338e7ebb2bdef6e0f4d64c6cc8..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/contrib/build-standalone
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-from packager import generate_script
-
-here = os.path.dirname(os.path.abspath(__file__))
-file_name = os.path.join(here, 'run-pip.py')
-
-entry = """
-import sys
-try:
-    import setuptools
-    import pkg_resources
-except ImportError:
-    raise SystemExit("An error occured while trying to run %s. Make sure "
-                     "you have setuptools or distribute installed." % __file__)
-import pip
-pip.main()
-"""
-
-def main():
-    sys.stdout.write("Creating standalone pip...")
-    script = generate_script(entry, ['pip'])
-    f = open(file_name, 'w')
-    try:
-        f.write(script)
-    finally:
-        f.close()
-    sys.stdout.write('done.\n')
-    if hasattr(os, 'chmod'):
-        oldmode = os.stat(file_name).st_mode & 07777
-        newmode = (oldmode | 0555) & 07777
-        os.chmod(file_name, newmode)
-        sys.stdout.write('Made resulting file %s executable.\n\n' % file_name)
-
-if __name__ == '__main__':
-    main()
diff --git a/vendor/pip-1.2.1/contrib/get-pip.py b/vendor/pip-1.2.1/contrib/get-pip.py
deleted file mode 100755
index 9fd5b3039f8bbf76829ef07010a6ab3346dc9b8a..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/contrib/get-pip.py
+++ /dev/null
@@ -1,1153 +0,0 @@
-#! /usr/bin/env python
-
-sources = """
-eNrsvVt7HEmWGDaSba1VuqyllSX704NzwMFmJlmVbHIusjBTzeGQ4DQ17CZFkD2aBTA1iaoEkIuq
-ykJmFsDq2fbn3+AnP/hX+E/5we/+7DefS9wjMqtAcnalT5r9tonKjDwRceLEiXNOnMv/+ve/v/5B
-8v6frMpVNq+meVtWy+b67737d3/2gx/s7e29ko+i28uiLqLbIppX1VV0XtXRtFqelxfNMCqXTZvP
-51HTrs/Ph1HRTuHLwaBcrKq6jZpNI/+smsF5XS0i7Owsn17d5vVsWi1WeRuJFhdFO1lt2stqOZmX
-Z4PBYFacR/V6uSyXF5P1clbUk5uybtf5vFjeJOnBIIL/YW/479uiXdfL6F29LqLyHMYaw4jFxzjI
-clZEeaS/H0Yv8nlTRFULc7stmyIbWPBqhneZN3nb1glMZBgByHw+WdXFefkhTmGA5fbx3bsXvXj5
-H74+PIjKJjpbl/PZQxjHRVXNomW+KJ5QI3ouAEdjQFUGaLnM/roql9hzxm9gANQQusaPmnq6yyfQ
-DD4oYK7ugNpLGFLZLOMWMVPUGx4WYD1fz9utA4MfsGLT21mS7jwy+xse2uBeRMiLvs6n0euj6D9E
-D4x1ivRkEIPLqo1WdbUq6vkG1qip5jfFDCCULb5tqkUBs4IVn5dXRfQQO37YVg+Zqh6elcuHWTbo
-mFR+1uC/ifk6HQTnIpvqlzQNwms0qwrG6XRarZctbZcbnMm8XF4Vs6itoot5dZbDninbYrSCvZBf
-FM1ggD8n8if0Zu+HJB2sGyCxWVkbIyk+rPLlDF8k8f8CyASKJITN8xb6XUTjcRTflssfP4559QEF
-ALOXYI6mdblqG7GW95gGqnUbQS8NLHS5jKNqGf22XM6q2wamUzERQ9e4OnpkZdM2CXcoNsOOI8Au
-uHd7xoDEsq6WSEJJ/PTNm+dP3z2Nh6pRCoN9D1t6X7zaJ9TXVb4AiiBwgrQnTVvVgGQHlzQQCQyG
-AZxKDEN+x1xvcl7OC/e7AGgGkZXL0gEzr+4KA76wNvGdkHi3WWefadr40WeaN9Phi6qe4taG/QPD
-jR6um/ohnlpz3Ne01HASLWdwrGhOcl4Dj72t6it5TjWSVM1dcnzws1PaKfDxLZI3gDH4zvHBI/H+
-4dGmaYvFw1flWZ3Xm4dxgKxje2Dx4Prvv/9HOJebKTC/sr3+r969+Z9+8ANx5gG0FaJEnpJ1oU5J
-eSwCoPmkWZ8B25sWjXGKrttyLhvNygZms5kgUodRvWjrQkPCrmVD+HMYfVvUDRzsz6plW1dz3Q7Q
-LdvBnxdFve3IXtfzx9glHmVD/LXK66YYwB8wHODJY/Usk8/w5XoZei2eDgbTed400a/LNrEHKrgI
-doaIBmTGTF9lLZ9l6mFdrKqJfDydV8uCnzfTy2JB/DUhCEMC9OCybVfm34380TSX8k+jOa6ZoMwz
-OL3mhSRsfDui7rL2gxjKxRqlD+zwXvQOD93bvIEjFyZIo/w5kvQihwOrxJMYH0X5RQ5EDcLFwcky
-VkSGwIGSy/ZkeUEfL6oWxJrZLKrq8gI+2E8Aj2kTjc65Bcx1eoW8ez+pi5u0OUGeIPdkNJkgrMkk
-aYr5OS3f+BsY+DC6n9co292/j0t+0QDe1RDuRb+F7QTDr+Fwm8FOifIVrGABBx3O52x9YTQFwEWE
-2Dx4+DCv23I6L7KLRQ64qeqLh/wXElR2w+s8mvJC4yo+fPSTn/30iy9SBQ42LYxQ7zi9mMNoWbSw
-44YRk//1GoSZIW7+iwUOjMiMaIvQY0FAVkBA4HSZNbclnOoxL67dE7UFbJUgATZAn5dEQtjf8cEI
-BJWE2Nm8aeHwTOKHcZqeet8vi1tsBd+5kB5YO4lgpRkQwjyfFkl8coJkBzBN+B50gMAzFfsoSVzs
-iP49BKU+sPy8hRNhNV/jLAWCzuHET+IHcfrgUUfn3PD4QH99+iAwomP9/uB0p+Gp/po1iH4JUNow
-QqJNM0XD0I1PuJrWictMgPtNjP0qCB+pDvvRK87TgS0D/8UtoV7gMQNyHGzVpfwso7nhwyZxiEaI
-RPgu45VLIwFA/oZ/YWcIwrsXojrsplyui4GD78kib6eXNMysKfJ6epnUyCBOmvvMGOAPYA3wX2YO
-SXY/hR+jcyAmHIG3DxTMg27a4gbZBez+VfIolbOwPgC0Bcf2ex6c5Ek4lmv4jxhX16gUNH9UvD6q
-Qf+oeIJ0tMMXIWCk8BEZQYOB85wZI9GCoikQvuEQFEQkFWhjBUGXPPzAx/xlodg9iPkV6Fo5P8Ux
-AfvHP2dFAwtNQBQ0qY7i/1BWEMKbFBuyxdUM/07iEQ8mZsFpZPAHHF62XqJikUgQ+m1bb8I0KwZg
-sMWHIeqU7WBQ6s8HyKyslo4Mk3hgjmmU08UMxi8JZAT8pviAMxrl9N9z+u9IKOAa5adDDx4gB1lM
-06LmMibgE34GjOOyupVvyAgAHOB2NvZxAwwPxu3gh2UrA5OKHGjcEyTHakV2E0EZ8GRIi0t/ypc2
-nTzDb4kKoEmJZ2Ek2kVnBTCdQh/lQC14ZhbLJofDv71EQsovGgUNKRzE0+UUD5ZFvgG+WkgB4WFE
-kgdjMFOfsPGkof6Pvnr6KKrO6W8GgywLekDKOMdDPzMHbuwTHjcdGIhvVF/hM0RIk+D0U79ttl7N
-YBaJ+oB7NL4xzv8lSgnFxOxoVk7hVAH0XqXEmq+G0Q2yZt0B6NQL4MupIZUwrpm1oNUgl/PEs1e1
-Y4RNmMfEAn37TRztW+wBwBgtoWtvmDb53CMJj+Q2xCmMOdS5wXiOPYDHusNTLWQUczmjncZACsrH
-D8HtWyqmmi2g+pDBKbxM9p5V6/mMuAqKENC53W28j8I2iPxrVNSRuBdlm+0xotPQgIxtpLcfcik4
-Z3jP8XaTzDyw53o5ksWNSPmNyfqHy5Yx9lFhiamHU2YfNoXvDl5sa2Jt1/EpME9jwCZoNVFjz/Tz
-FdCZy7oBrBd4GE8v8yUalkil4/OG1G5Bjp1DN0dLkORQQ/NGBafABWoK4lS3OchIM83UkkV+tomK
-G2gjttQiB5W6tsR8YzYHnmQh2aLgMj7TNf4+/uI0hKGdpkpT4APnEvTeO6xPddbmaI1R62MLlkMh
-tyieh2IVPEpcHPTNnTag915YAZBhRQlgf79JXX7lb1UHcGytS9zbSWyO2FgOQ5JJwjtxaIJKg7wD
-+EV5vkniZ6BMI1/Yb/bR2hgxD04URAFkaNlAxOGR9koh1j4kCwEvshgugjgNyaNBSXQnwuylRr2H
-Xi/nm2hWAadUp769LWCu5+fAknkzf3X49LmvrLIcp6hMbkHGjKl22BsmIONtxd3deZig+27ZmTWO
-z8KlHfGwQ/xTYxj4GoFSKKzxK4x2TmK6rtEyIoSInScDzUekr+J8cHGR19512Ebf4eEr2axz+ChW
-3mXc0J5IYBeU13kJAkWFUgwOd1rNiv5pyeEijzoN6uM43B5lHIWOsaV7D3xl8k6KirlOAP0ui2TO
-KMtXIM3PksRYKgKYBmdPMq/87S27fGGvtSlWdy63UhrusOT8Da16fScqNUbUvaJyRD2rSk3lspKR
-KR59GafAyIKrLETOMagufPdwfEY9nmF3BpAU+edZ9EM45O7Hp5+VUngIdyUWA19heuEGaReGiWqM
-Rx7hGO9s2sFLzrq4XpfAWYGhSLmmxHNMjnVIkj2yFFP+JEu2Legk/uzEQYWNs3l1W9SOUeyibA9c
-y4OATKb4mI6bVaVFnIuLCazKXxfTVl4A4GgzfE7m1VRRCizHI6SV0GAOQpqHZQy02bt/0vYyMV9H
-3rZJgkqy/sicgzmyUjMFVw182cYNq2PWi/M1kLbEFqJ5vxkJqcvF7VDBPjb6NDYMqaWJMx5zXvly
-5mwd460FlHajLZ2mwRmhcnNZ5DM0Y0jl/qMnGJSGAv/rGrU244uhI7OM07RHIg8Nb1bc4Pjc4Q3c
-bQxtf7nf3IN2YzEjpOWhSRFDuwNfKiNthMzrB0FTzxsyH8FB0bTrs+j921cN+1vEeI38y8uqaRHu
-wmWIaUJA3HZkU+8Et1XE7ybSE4XBrZaOgUbCIqEkFOQNZNO75u79iNwpIDFESN5mchw1SxYDqmoc
-la85yV5LL2THn3nf54M3d17+KxA7If2oYTbNCEDM3tw9+r/+k48+UjJpU3IBEUgf0c9hws8R2GdI
-wTiA7Sb1DWwlCoCE62jB6w28Fs1YmlIUbAn9spFRvzXd5D5k/C+TDs9LCgMhprqG3PHw6PZiWZ/z
-fWFtvkEssfTFBQBsvy0qzHgtA2pShbK0nrNLJ6dtHyKkWKDX88bt55+H0v4JH2Ipyp3ztAR7ZQL6
-mlYyWZj+NpiZZBfhAizT6mWJ4GV1wRs+qW1FluDRKMWohvwy7+ibbvWYf5zEyo5oOUcLcxatxrBU
-gdMwR6Q6Oae7OD4E6YvpoHGLua8EMlkZ8eZsDBvT/R2hcgn8NdvfkMfm6/L6gKGy3u0n70b+ZQdE
-LbtJ7AI0+x0LNbqoptNyviVoOY8xYckXE71LRSR9yC8mbMwCpzeJ2erDlwPYW5CAr1y+2T36X/+j
-jz5qgznW4kQHVM8kzpkkQ/bU9VvyszrWRdPkiF4O/ddgsQD8n+V6Wn5GCa9YlUG+cXD7AbhA03Gp
-Qsw5eFhCQAg6rcHZS2CVU/gM0TZF3QtgR+Vydj2UtE+mlitoBARlSmJdy4f+Zdp6ezgeDXORa9cD
-6ZpIz6ZlwOVEY211lji8uAM45+CV9eM0ojl4F+NMy1kJz/Y9by14ksF/zBqOXv5XHrsWtcSbvZf/
-8a3Bm3sv/6X3+qdq8eb+0f/269hCGw4jKZnmqBiOYHwifvPAOVZjTKfl0lp9r5Ggh5gmbY0u0kbA
-ulwtS/XVrD53RhfAhP0ZZ0T8VLKnxN9VGw4JC1HsnxHJ8dNnR4dPn7w4SZ49fPS7h98+Hj95+P3j
-FyOR09XxYdowBDit3lbTdTGzgnHaEUurg1LXgFvp+kjhTrld06wdO0P95o77vIODy5FPFtPpmI6q
-NpZ9uruLaNAqe7UFLMYrw0HamJtJOYac75FCALV+kHZVAbz5IKWMSJm595DOvMB8P7myDPVhMt5g
-JNuM4wz8j288EPaFpPSYHcP8u3CQWFl8mNsMygxoXu8uridBfzauCw1lXo/b31JPv65ZMJjMDKNN
-Rqacc46TfFVNkpERCyxf39Y12KQeLhbJ4/k5qBZuPJgZAOnu7sJp+34DinxJw/lObC/2GmqP/KFk
-tqsQHB0E+NW6uflCwImP+dGb9+s7fD+OfU8j+AGkK5obs+016DP42dTk0GFOJZBNLwEIhxYIzU9s
-ib/5mBjNODYa0iFFR4IIWtFBPC9BmTBBpdyysEClvvc8wIQV15b0rqrZdFIsp83N6WlO9NRU6DBY
-3nRdvJ1gSpMHuuk5anag04Ze9nALuwHpEC8cTcjEzb38IYCWXThdtrIsDSHTN/hMQTpbmixyLaTF
-EEcCpSSk4LVyBkk6scF47JR2P+NroeFWAIrj5G081MdAPBA8piOOxMU9bj2IwJe0sqWLiBdJD4K+
-ZYjkXM03Dcd2c5u2sGAkFwn40d1B30Yssd+lg12Ckzt45GYs8IxWy2LeAC1R3XdNNTkj/nQrZGmq
-RevaTr3TGhssAZA1D+CDgZgwrJxWJVML/yj6Hu83ZwGYlqfr82wHoMUXYgiDyH+BsVAuBEQet5sN
-MADmQsRZaCDng4Pb0bSnr3Y3oVIEUoeiEkJlNxbGVt1JvRcxeGo96x0z6c/Mw+l0q3kxnSDbkmhY
-ldPKxhkLemJRtKG3N9F+tKYPda1YFjvSBCWZgatS8sizUPG7dpyiDiDFu7zfsZCltrUvXfqPtoj5
-x3pNXnBwoIHDw6wsAFhy7pTC5sTDI2NJ4l+qHZf5SPfYMtr0loiz2O4ZuwU6rB0PfKNCDyhCqDDf
-tgIGZcroEGsjRdl45u1VQlEaSo+cZx+k7ajAJJc7ZxaRKD9TGnPs06KcxvNz3LZgov7Q80gsOVqt
-WyNt27L/oUZrjTl6uB9gsDYNCDOOjR+ZO+9qTAJvGPWlHL4D2qqs20eb63mzHGHmXntIopRoSGg0
-3uFtjol2zRvrtfxpRBehFmNTlTru5neY+Fy8217K0LHam63kr62BBina2OauqyH8tkg2J6/Q+3Kx
-7yPEO/8xXWnrndu1CZZpk24X2Q69Cclb/G8cCJMyT95X4Vx538oRTRJ4MA29mfvSl5hhCE5IyM4H
-ikLIKQTBXZStD9fFnZnRUXtZzrxeD/0MaLrT+TC297uL633s52FDbcqCh8ALmLZWo7uTeQSDhc/b
-6Eza/G8inVGiIfhLXfREJDG3HXUn69Xid6Szi3qaBLpiZ45fz6c16va94p3Zq8dxtrOqxw0ofr2T
-WaMhb8pRrYCPPQHcLQk5ipGEXi7ybiBCmIzebNf6Ph6a/DQNsVd3nw/KhiyR0ELec3/oyxRJ3/Y1
-LoMFVgelXYq5zRPVPQZsd79X1vVSakPxeILSzpuW5/YEFXQlu3SONV5ewr6CXTkru3vDtGvFAsuD
-Ff0ECwEvYw3xjHKNndc1dNNKn5eX9VvgAvP6are8XKyu3ZERSQfZcSioNeGMjN271F4ZltB0aUuN
-wQgwXoSNbH//DAYkmhvkrXsWV0YP7T2Y7fa8SaW0jPEim6YOvWy61krWGSmvWS0z79Not1o0EUWy
-imQ4teLTlsKT4Uwkk6nMXMvqfNyHCBqcRqBs3/r8iy1Hz+mj04yrtJfl+XnomQebv7fXbbLr+sSc
-D5T6c5tD8BBdSF/TYQjp5SQlVPw4VAMCc5HXhYi/o+ucM9xl3gJ16itkeb6nnXQb3P/BuxwFLVwl
-r57Oy05LBRqVUCxToMSywBQ669/syeoxA5uhNxCDQ7bmzeXPGkScL3j1dzCG1lb3xY50D5RgHd+x
-4zjJC3x2N0MV30AI0JBapO8G7Q0Xtnb/CaxBwo/PKLDFQ/De7McbolRJNlg/Ue0kzU+6T1vEv2oA
-ZnnMzvPHmZsDsPUZkd4Ou2Hwovxkw+AcIPm82d84Egjzi3hqx4HBt5UDA16ItR0jUJVdw5PRjDyV
-tbS1XYsYpgtTtr/VOnm0Cb2920uS2wth0UYwbpISh3dvlS4xKy4Q1Oau4Xx0I1zggzAA8ozIOuQX
-9IVof+jOBDgGMLywTtD2bP5t+Y1ZPUHHucL3Kowi5NNkHjPHg1XOSl9GBFm0OOH7yxCdwh3DKLaZ
-ENqvtF5gdUG21vCW5mIV0DSwDAiQIhIF1RJ/BSQSl8/n5dVYvj1ukxQFECQc9dWgR+6Itl6OeTR4
-RC0nPipuStgGtpVgMR4OkV/w4CUR1oZsRtU5fUqOTgWHr6gISP4YIxx09ISbKB5dByf1BnWbou3I
-RQuTO8FdYz8BsEo4oVubcbG66L94brEX/SvJImYY67XE3aCZgCi3uUgSadrZfJ/7ZFwXg1ozyWp9
-RvgBIS2BcnB+7aipjxnaTR1jGJ37uUNB2OYtcQ0InptaqjaNpT78DMZwM/7MVprElC1weg+R3pBi
-b0O9YUxMzPPB9hTSRvbtUGx4iojIgbaRp/XTiWVDYlTBX5AVSAlaEOrUHRiFpeGGBeXiHRTlhSzb
-3Xb0VMcFXtiuXnDz7cngRsoab19F9lQQGYXuH/aQCVxAPG+KTuKKK68od0s0FLPDDSHqH4wSqLLo
-bE7h0nHlxZ3TlzCwZ+vlQX4rXy8gMSE3nbeYtMyuNTrKpHsbQQV5Z85oGy8GxfoWLMy1E7HVdEx1
-/J7H3grL9Aa45FbHF1MBYxiNQsuNHGBdkJedsJKbj74+2Xqrae6qJJBzBa6FE+S6EDQzVgwUbgGl
-9dvFINTBZvXVoQ4oPUFwIrB0S2Iu3QzFDvS5Q0QDPm7mcsB2ZHI6mLuoxCvnNph2uHNhaF2m99P2
-jGhLbgOe14HtUx5llJCtfc/s3KUSgPweG0vVIiIDVbPxpO/WMS4MrZXn5/uxS1/E2a1zjkzfMjtP
-AFQAeCm71jscR+q9B5AY977t08ynMSe8Q/wdcw7/CNdqCP10PHxWne5RTuY033ZFg3k4xBDrbcbf
-o1Tljb1AbPLAyibOIO038ZDz4HhjIg9PuY5FLt9pmRFD8MwjJ10C5jjrkrqke6gCgO/pgAQPM0Mj
-u3BdsT/g3vDecdzcBHj2A+/ursW7KD2pbdwF6V73/VRmN9FabNXN7rO4W0/c2u7omhM/k0PysCau
-IM/WxPDnlZUvsLK8y6ZXncU4yoYO+M33D65fY+Ut2Mv563l9RVrP/ZurlahXN+Fk1rOn44q5Yftu
-FEhim7Ovl/He9s9vb3aVDt3S9msUzNuTWniW9b5Ko2shJNqpELCujTYQYAt6j8QZ7r47vn+Sb9P3
-l/Of/t9ZdTAyrsk62T8tHR03EleS3a4oxoFMe0HV+T8O0ljPNxBHZ4f69J8+N3NZ8xR/WtVGbDSc
-997gfcw4EfHpZ9hjbmh/oa7fpewltvqWcx4WGwwGKmQvH7x58PJf+OHgbz45+j/+9Ucf7ezsfIVY
-UASwxaHijalqgUAJkbDWxQpLye/F6/OxxMLr2FdUnIIsBgbGxgtxNELtqj6t69lgA1rTo3p+Vp0/
-w075qEs6ClLwJiWAkgLOxhP8eEyINfLQkA8+sZGNLyFa1VT7NRVoINT3G0QvAJxgGezoEOmtnHqv
-HXTLo3WzMv2ByBjGPpAMGOhsPueRPMVZpvEQ+BcFy7oM3txPm763fLcwKyjQ5UMCNJGfoFZsILBp
-vTC0vwQry4yRfwlEBPszI4wsL+c2VTuWPAX6GhbNh0wAAl3ImTrqiAYkNTeZficP83YCy75pHvX1
-1i5ke4bd6umnrUXzSZ/ARXhZEMrYLck1/J8FShR4F6KzNbnzcXgZXDLK+dtqWc/humn27dtiWYF7
-QdOX+/wOXHfNP3dgL7QjA6glw2j0vhg9L6707yyIBuP7JdU4WtSLLIVHYdAY9VulG1TbpwnrpHek
-QXFfu0JmPcrlyrXvfFViKzLaMAthYkfVr0CBqN4xhB2tAUKtpM8On40fPX3yzeG3428Ov3ucilFa
-07uuQbv98+VevY7n4jpWJU5CMj+OsKYT5UEd2TqWczV+qNxLJlAXImjJFFUbhMFA1ivGJQLSsdTp
-DkSMt6NZSr6uEXPH3OtWkHiANUUIrCGwaxOzsAWwF8T6Z+wWXlrkwhnwmyYf6ZA8gFY0XTyvcU4B
-kc1m9MCEHZztEiP+vO0ULq2fnOtWcn/kjcw71JuSgJvwunk+q08LiKO1NNlG5wOSpjXIAMciC7cC
-V5jxv/qmdCt5MOra8INtmuBvx+YrHzmDp67hxAIyy6xpqxvGcvWHbgTfITwn/AxsFbE6Q+2NKQes
-obx2ipAxxoamUe9GKKjh1nZ3Y/d0qtS8pGsZ/LxF+KcLNIiZ9UcH6JUvGjEbVhyIg1dNBR3xOd0n
-Es0cIZaS1TOcpmBo5m1/gvNI9Qw4g7DCaKelqY62QR0eUWwtqic4UDje6lsUUs1/GejoJjZUbmoO
-vDOaX6zVHdwhOtw38SP+kxQl6y6lDHXWCnWZ+X0DVTx9zR0yM/jWMJgxLlYmZNypgA+Ok9/DVz0a
-+XIrwETU3SNMzM7DOavJ68lkDfm2puslAXRrHsUJkztu3RZ45JO8CxuhOebhg5HthBY+PDukaPQw
-dJwJToyAu1lkQWGIiL3nDSEIn9Zn/EXR2OpjoTNe6lvFaaIJnI9P/O57XI/va5w+Eg7r1B/Fc4vA
-d17OS8QCZNA+hKcOmC/zSE6jB9XpUYZcUskKUU5pZiXgfp3ZHMm8l2muSmXRb0sue0zYfrJ3Fg8Y
-QKkdIP/0LWA1TwXTAFzxSWpgedQJAQSwTqi1q6rgdfdOfV/u5fB6gF8MxJCcoPYYss1w0qt6+XrU
-FXG/WNaQ3SQYS+CNmzydTXGpd2HjvjbH+Kcuy9IoRmB2p//ezY26TwzCHWXD6GLXEfk1AizhDMGT
-Tbv+ESonjE7yCrhkfABnLXdmrDI5sD1AmVPt69YZpkGF6ZsAWLhNWqa6sSni+CUfk2P6Ltuewwi/
-hcsMc1uux0qc3bc0XgM1/Za7LzCfK/qWeIoBws0T+H5gJunChvhyZipEDWKZFRzUyfZindSdkwu3
-MbR/WSXAENkt13e8/8lJLlku/d58rXrypF59A2bKfYYhRLhvwPbCuPFqAXZR2PH1ekUbajYTiHRu
-6ICgFXknHcSuog4b6YCRkR49/f77h0++dghJjLIhdUq6LkJrnU4RrY3J7wAvLkPBr6Zr8kG/KoPD
-6DCHn4jDuXS6BRaS7l4QSAii/w0H4jN0kKoHnUghhBCCeIBYPO9u5jE1o9i315q52bzlB5BeyvD1
-g/Trw+deO+ZLwCAxJfUpgDDya4QRQwGnrPCmgZZg4GVkGUzNOV8uDScCdslXEl0JArZZB9a8bySN
-jASk/l3fSqmHBG/GwZsNM3nImC9epTAqNXTOlVFJPAOPjzOjjpJDgu+Hct50ixCN+PxmBvg7mAhV
-u6Ai0D141D0PtxJDfuT2ZJki4O0qL2Bgr67qZujjAN/S7ygR2x5hIUASAoC4r6blEsF0Gd8eMetG
-vAq77utdWLwWNY3VU0ykbiZ9aUsRVlWqZ9+yvRcvnz17/vjFi/FvH3/3rGcCnhuqw/sOMHlOEaAG
-BaBuoAvFQ9N2ewlgveZAdOWInOxztxFoLPxirF50UpGMS7GN9xvaS8jKsF7VhqdAhhXIaTHHZt9i
-9LMmR8poS+krPaZ5KyHuM7seJbuPXYpOR59AimpiMM9zZGL4eXti8MUvMjHdu597Zbj2ad2iOv1M
-+kJXKr8b9/Sm/5byEULykPVqsV718Z431LqhiNLnn/rJzVqmhC0bW96d1edei+63ZdigbQPYP4+n
-fVdz6hjKqVMg6PDM8GGAuXpXXa4vE5q4anWdm80OWF6WWF6bI72PB/2AMROMW3xuxvLW3JdB6z2r
-JuCteC3wWjPBsaWR7EqRXfokHNlY3o+997/8xjtywj2OCG0Ytt9mK9I6yDDQuzjsu3rYtTTS39Bc
-8nO6TjD6RUM6P8yoIz2f17v42+vqvB7rh/8Ae3fXCGTvrr1O6CdbnRI7LxDlHfOPw7ce2r1haMt9
-8Ae8mn6Jb0fmwVt4Vi9Xo4RuUjtGEOW0sfCh+8YQ/pdwSlOnzX0VlKjM/qf2pLmu11IRHJTmOoVO
-6HPg1xeQIpcAKqh5BGSHL8zPt3BA8ZfQaLuHhBALxWX/mSKXC6iO8mpIN0c7fZxiVV2WNawr/OBJ
-dA99unzx+NHTJ1+/8NbEL0qrcjari5CX3f/Uk0JZc9rUk9fwJ1WSyO0luS1/AAjqnOD0ttqGcmsB
-DeGynkmeXswUMa1WqPsdUgahtHk7t2xGRv524rMXe2O2z38Z8eQhZOQq30nYa0OxT81rIyczUYFY
-2Eh34cWufrGLn/sirSkz1mXGuszPGoaMw1zzYEbHKK+bA948nI5BxQnW7V+9/GcMiPvm06Oz/xrN
-2uZ8OQeRo8OMPePXYj78Dv0KMsrwmHOKPVFycF06/yPbdRk3dxf1v81kWS0g/dtDsNI2yRISPVkj
-CZ0YQ5js4m1dTQ1FXgL8/tTcPSEpMohK7MOEQDJ0scNE39IX/OP3j59/9fTF4/HXj796+S2AcFPn
-Rvh7l9S90XeUiOHJN0/VC/hJacSfHh1+80fzJtOv7sqPHx4+f5LvkdMH/G0Kwj+HT3Qr8ARLPH7+
-/Olz9QJ/U8qLh0cPv1Nv8DeN67vHv3/83Qtwo/BGOEz4H+jPkPs5xNaH1NCQaj3ZAiiY1ZSQVm0Z
-oInhO3Yr0f4h+Dw49k0BPHXCj8dws1+C4BSgRoXvx5ADDChKKrJdJ9g6RgBozj1raDgSM4mk0OIZ
-apV31aK6+4a10oT3VMoOPTesVlawp2IMHLphtUQQPZWiJv+D13pWmLvNTWtFeu2tVooOiW301h11
-kweF9VUXXhskAu7BadtBsL7impQlCxDBSfAgpTmc9NaTvhmi9gDS2u8oBwCyPeE/8NGV0iXO6foe
-wVWTnTnmMctvq4W1W7c1Vppf+GzMuTmzoBKqNO78nnXtz+j0gPilu5Zxii0jVgxVuq0uL+qNvKDL
-ikRNcKh2d5IsVfRstm4u4qFpdh04nUyn+2WbtHz1vF1OSEh2G4u/h3evX89gQ5E0Se9ofn3Xvuwa
-RYyLd/emFdaNaCGncEsVEBZMNQrVJPP15Wm5/GKboaW37z2oBM6D6VNeR1dJkvsIuUEEHix/2o2C
-heWYSKTuu3Fq6V4SqSdzvXOHK7icWeqNWKle0JxdlSLOJNP6ag6ZXRL56gttPmLzAsPMIuWO/a2s
-zg0KbVE0XsEMgRXH9RCtdH4PKWfdfuihZM1Waj8OE58r7hwtK5aR/XpvL3NwF0OAE3fy3176cLEA
-rwO8O2wmAiYXzGywxm0W0NoFMMbBDXhBDysCCI9NqQW2YWIRyUhvcOLbgCd5aV5S6giXqAqPbQiA
-bC2luS6ZUqO0vaTbL6eu2Vp4/DXeuekiaVNoZHpiXiov4BogozcbnnK7oQcCXO+hTxXEkPs9G2w+
-ISDJhiGK1gognXC0bz644fER5xl4M24g+VKBeXuvZUAIOHg1J1u8N3NJdlqCbrxJRmnTCSfiT3YM
-92Trgq0Jes9Z+HCyfpuRDmXe0KTos9VDzOmDmZjnpGWi5ptJvSglwyEqB+3cY0oeICfXOViNYYIT
-gLRVzMkDLx2lMS+QjTQMkN5Mu52SRIQ0PxThWdFPsZCucFzym0DpAVO4v4MsROBM36oh34XH8uMm
-B6dqJd1iHn5c3m7o/6NIEHD0YYSZcaeG0lJ+44nsYrv8lz5GO45ivpTEvXNAQ4tyo6ULVGga4iBx
-CVWI5zWdp9BARBToluaZBaueeSe//6YtmthOguofu0O51fErMBc3prehn9DNbyOuaKNWozN0HhuI
-zoFSj+CtsGspeq45ejSff/55At52rOzK/RfBjH8yTH7lSvinekf5B668Jz/EijdGFC8zYBjD5JMc
-/neDtsKPb97wffrw/o0/fND60O9uwEWVl4+QMFQTynZwtoP7Tw0IB1hwxM/kR71o3XkxLbrygSXR
-GJ9+HhBEJ+21r5xYs+mHrhjcqeHZbw7ev15+sUGw1Fsi+TxsjrbCl6iyvSyNxDYN9obZr6giBvKe
-gLY13A5WgWfejkjlqJeL2v1Ncq+7V17SSPvJ5xSXRSXyvq9377cCA/gN/qPUmIjmxRwN0EpRrRBy
-NfOCcL98daZ5DAniWwkTfoaOxRY9OOjlarZHNuq4q9HI4UX97vxSq1e8cYHXJqO6UuOR+4odGsPg
-ZvBR3qEdHpXvVtAF6pA5Y0jQ1Nzzzb99+c/B7iDX2zf/7uj//sJmCHRpAc/LFYKBRHNE8l+XlaFn
-jGyLpgw0HQ1yBsYj5rLL6acwv4trQMQbJu8uZ8vFZFadDpP1cqb+fTDciA1oCqLxxXdLHCa/PTp6
-htrD/D0TDupYwExgBCS/YVe/yBwybqqfAoxaP0li0bzuqmGNCPYcEEh+UWNZuvGkMDzesI8ZZnSm
-X/JSjfPtxA7Q/NmXi3EwGKPD6Bj2YGrXYYypkqBgyomjMbUngwvDMzPtgLjkJ91Kq2ZsXvB7xAEr
-CEABU3Oa3/xW/XwQVHEOzAYLmcogfVPFYIFYK02OGdQYARj8T9XUjV0dUNj90i+5/MVqtcBHhqkN
-IlNgpsY+HR3JU0z4KT7RenYyUxUmaC0bxDbUlyTycveSsJOjO6VPqVao0EYVng9xu5+8fP4dJB0n
-W6TpMT4ZJYkIi5nzLhUMaDnkJeXWuDH0clmOXWYs01WXtdvPZkRlQeLG3Fnny3q9yO7nLegMVExz
-WXOfIMQ4Uj7LDAzaeml55UXqwLBaICbbI+k8V6ZdwXBz6HYNzBeiulHwZEXJ7GfRFBCug8iJAi1Y
-MNpoKjXzlaT/3k9BEAuDOLhcB8CLnyVI1g82VWOkiovxdAnbwiwkvouk9movaNCwv6ygYtgHPYwD
-TcnSv/R1nBj0aD1/s65XZbwHYb4yGFlnh8xLaZ+Ba1sTYdrkKeuRzZxafILzhgZsyrcdyDvNgsaB
-0HF2KwRSD/EjSKQO6LnwD0WUUmEvYMjC0Hl1uZwPZ/pL9vg+fBqEAvWH/3Sm8n6E2mzKlQSe4eFe
-EPwRgOgVZYE/dz5GII+eeRmOAA4Ly0EwzT1kHl4Avsoy/VMGu/cv8J/mLxR5msJ0jQ7zQYRuw8/3
-7mTHxe5PJ/mPf7GfSez78++eLiCUxrk96BsMOOg3CWjvYH+Dl4ZErRuBqEExAJ2LkPGQ01OzXgAH
-96rZaJAH2eiymFvKfzCCqp+xT9H350vI8clO7c/LYnapwQTH4wmesnwbBvqI330Pz1SwLJA7Z/9s
-aAxAEHDNQc0FZE4sZrjUgJN5DgdLkfzq3n2dJnJhhB3UuyXgYH+5wKvYZUGQ7WclbBBoUIIOknUD
-iqHToqkmOHdOcW3aHw2inUYiAacsOq/Ez0qiW4zYtCwmq7ERZkBfVJk7l79DyJl2SYgyMfVcNCxP
-BqdWRPayDc7kucl8Hq62n17Ljpi87eLxANLJ0PMU0b3MGnRAQLewmYORqJ7TM3+mYjfR/m9jCxPL
-AYgf+IFeMnmbaPaHZbEgkQZ2IAQ0lJBfg3zezY5bmU1rKPMcyHiV7KBMa+hbKSAkukEiwLCy5988
-2idJZ2/vdA2oABhpM6qX53tV06zLX/+7Tx5sodDASegJWaJDRQjhuaOYoeEmZqMsm4M/pw8nQC67
-RpCoQZ+Z7hthEol5dZ3+tTWf5uNwKnlF7Fy6dTkgBZEsTkSn7uLG2iFuKD5KdlvTlA2yWzJdsDcY
-7Wse5UALY4Ae4bjk6bqamTqaUbQDxP6HybxcGeGJcGuGiZmx5bWRU5fFOc0k+ciZP0iQ8EjYUJVP
-q26HOu6CbrTTsSNdfhCyFmbKlCoRStsSrHajjupA9aZExe9VSXYH5xbKbQx8DZPuRpw5RbgXqbg4
-0NDw3KhNzX5kZSmSpJbFFbkWZ+nLhvFZAOsNTm8eTj6IwXjyxHiV8aV+xP9mqRxYpr42SKPtEp6V
-UjKul5epB9fN6KT3c55fdJWBkinufRmc11/BwfbQvP4tvc50A7mCiQCBQ30L2wJcVFEOUdVryvKd
-MUj4xPOYMDrOAFXAcMRZ6VynzW4AJJllos7GdkAjNDrCg83sFu1BAABINiLZSDaGy4G/amKpDvPZ
-dzCTF3IRJaFI5otEDy/0tsGgqznfWtGV1Gkqq3lJjtpGxCAMA+K1sBf402qFaBpNwk7jsBDjZ8+f
-/uGP7mBToV6CSdHBgrxtBdcB+duFXuKAFJPBB5lMkQ8VDg+DGxw8GrOQqGjgGTwXuvnzDhxNO/tU
-epjsnLlff80DzIg+YvJas3qv0SPQ7Xxz9Oy3IZ3xkYWFOGZJqqJ/vBTmhg97chfH3tN+8Zzzdr7c
-IQh/eBWckIZSUfI84NejJTF28w3eEO+1Uhvs7GNt8mWnAl4K8JXTfOYZLSIlh74dm98SHwh8MiJy
-5yZZ5jF902Bze5YRoQatwEu9O6RcpL/ZhmFKJjjQdtldYxcYRj873CC+BNJJS3PP2eCjRy/uAADA
-hGtYlm8QJnsqym8kBui80vq4a98P2lQa8vONFwR9W1Wk4MGC2ma3PaRvcaoS5FkoHKk+OtAikB/N
-EFZByhAYgdowouG53XwJB7mbjTRtUfwt5mkWfki6HrIpKHOQ2DOdRYUvRVrIXCvhjN5EZqCGegnn
-F5qqfZ6tLLJ17Mw5Y1KxBFK38ryl4PWcaDjbSLd5G6jQVhrbwL6UT8dM6zzuYDXfshxAp9V2xy4d
-t1pb8A906kqaXh5V5MBkxIcAzcw1GiSEkqO3VZl5kZpFhzzV0ffCS6wMCO4WEAQ25uNaPhMt5pft
-DSbt7KdySI27pV1p0GNc3je+ovcGfjhrQmLwKutA+tlqr3tTkXfi/b/Xpg/2Be9Odgny56enG/G5
-kKwBvHKbDe900LO6xwzF6QjFKkOWKA0GFPH+sUkAaKPMqtdwITSVCekzkTDAvJ9OseVCYC0mlCXY
-U/7f8zHHxXuGvoC8dGT8MHsENajwh9i8jGCZniR3wZI3AmGPPmp4nMrOljmxRvr/iJCq2Jy0Lwqa
-gtTsXlF2TEW7hdLY43dprv1TdzhlAQV14kCodhAWKOm2w5vNYO5uL3PAnrIyhDMhYM46acSlrLOa
-/03GBrNAqF2O2UNaCYcwSd69E8/Wcfxg/ySiDG/ZJPSy4e/BoEOZnf6JVdmBAnxD6eP9v5w41feA
-xHdrJdUjai8u9hbX1S6zuZMdoeRg3kg8LKp9i9Omnq1Xpb/6QUomUC1Dlg2b08F8FeYEgKm/6bT/
-pTXtWvs2ClfXvrRCgcoxoQxlrlxIP8KtcF729nA5UTsXTvCDDz/DKGCCkgnrwrFNqTjIQM3GBcAJ
-71kAnPxh+CXyHXylJhI6rQ2OxJx0wg41j2byCOv/OFgWw8w5+chyJejRUMGJJQjY7Nj2/qClXt1q
-bfBjb43Y/sY3INSkBqv0EoNcLZAHdxodT89qvgDh1UdMv+C2A9BrZj3t8qCVENXKDMk2XdaLhUUs
-q9engDUKNiwPYFSYMV75MIqaR5MjeVTohzFDO1whYJDmmDs1ktZ1YgTm5W5ZnJfoEWIENmLEs6vi
-GgzXqA1rDK+CqYFLIdluTCdNt4asu7WDwQHAtNmjqFgV4IiNuAZU51kNIb+GCk+vzSqbWgmAjQTM
-GbgOQMazgoZzViyhV4cJJDFAgCJQP0NtmKEH3OdRU183zEuO0AJwCIp7PB5OS8qsYwRQq8hf1fWs
-GVXl6gz1+Bery9ne8mxy/99+cm/AlZgxnq3nBAJoDqX1Jeq8i1Vw+9zfO6vrvdOCpIWKVFkNKi+b
-VtE9r7AQjqf5R8qypDLQ19wlpaFrmfQHNkqtWflmbs/OrYwC7YPGqg3J2b1Izut6mkAqVSDPCZCI
-EEYgfe3tsfClu6nEH+07E5WD0MkR+v8K3r6COSw0dFHRUDIirAbp2uLp8cP/p7ZraW4kKcL3uXHh
-3COj6JanJcyyS7AiRMDuzMZwWDgMcBkbh2QJSeGxrFFL8uwS/DD+HZWvqsyq6pbMsBdbatUjqyo7
-KysrMz/wUKwI1RpwUdwMjJY/+o+zH7+gz+EZ/t/+MDuAUauUi3CYXI/TksVoCZgs0nMyvd5j0+hl
-rCRZtyFAgLuvi+AwA4qM96OiVB08WVAFHLcWG9CYMb8e5CK4Vc+xNU+maSoQqcpDgB/4D0n3ubt4
-XZyIV8XNmMSfKRoTk8+XuhOjJkLJkdfF+HIfr8bALUQ8/EbLA4RTwOeKmvF2vgTfhX/XXI3onQvO
-XkM0SYQQugzSlkn+bSjG4HqwGFf6gGg7iYbml59Qde1s0sDIvZCpU3OTmXA1m0n52sxRXdB60yp0
-cMRYEt8VwkCQCs0p8fy1MeZXXJRwHpDF5/3FDtkype501ObFoqoECcGecZpcLo4ZcjvGpsSMYUDb
-ilDkoSMyBwuZR8rx+DD/qvLOjiu32fJsU7PmF0hiqb+PVotP8/US7hL9y2grvJwQRa4TfDCO0VEp
-Sr73/euvCqzAF9GCGwfZHvvNwG+D5JaFufg+QS6vxZyq9ZuXvbxzJ8TT4muaGaMnS9ug2rx9vnGq
-jidToeOxk0/CodA24iwjg7NNi1xMO+YXVoP1x624Npm65U6ANinCVHk93K0Om3vweWJ3qS+vvv5N
-go+KhSxvz1zpe3XhrVeYc5JjLaEr57Rkask0+IfizUVDr9Fp+TYSLmq8+lcNLRpPljc4y8ucsln7
-/BoHF8RGuXUH4yUq7JDjGXUTdj4pWRoNqUR5Y/zJqpBeuvbQj4PWxq/MWDD8mx7ZKMiJrff74sur
-y19dXV0JlKn+NVQn/YokG39PR7uOIgzbnPnEvwQ0S8eBbq3R5QHTtGMWK9CNc6F1YOdR5LWC9URx
-0+VrnhTCWsNXn01tMphau3tXupMYn6wF3Pa8jg8MIwa9MAlCQZdNPsIzsq3mG+EqlN3F1EATPjnQ
-KgEj9ZK3X0uA4DJpZUC3HEhlQcSoDDGkhMF5HKW67WaL9rBdmgO99oGuZ8VhtjT/63W/X0gCBvea
-XYbmf6lprltpGGSg5nMS6Vw5631EtxwTGv0WyeEEF6t7SXgaTAw95FgOIzo5312SHxRB2viCT32n
-bjd+IfqjTjHg2/b6bASNGNrE2bYwdxm877g9c2T0Xn3T5r4q4WgtHu2c3Lao1gNMRloX1dNgDcRX
-swHGmBRlW9RMjFqekOAaKddwSnuCP7PSer4HsiaFKxZjk+B82ZA6vHk11Z7KDhT213BqCBLqBLGR
-m4UPmEpLdtAzK+Mc6c1esHlCyE5Xm8kovpkS9pJbiT5amfpN64rAK35yTRKoecL2iSaADlaj/BTU
-YWCeIWHFlJczVcds8+FF6Rh3B9petPe8mx6dxD5zTc3xVmJzkiO7DYmKjvD8JqHu5hYP1BeOTBs9
-3M/hc1UOqQeMRlpvBUvG3CH78wtbfS5K7e2iNGGt/nXphZbocXxzHRChglCJxnkyHM57B7HVVt9f
-l9HrnOKd295i8KGOYLS4poxYnxeM12kkDFWxwZnlXpUj0YRBdpvAoex5JakQh2iIDUTOLHBgBYt1
-6z53enONueGs01jgC334AC7OTGaMCop+vj6VEa6GgBXA65dMtNVOOCjmVjstE63wEmpmspRGM5g9
-r+CkqzhfAZGd2P0yyGprGNQAzjEXu9/HkXv/PmPMwmBZSIdaaXoTZSlpzZDrFE9XIENBwQ4aKDPA
-2qBDh3LkJRjVukISZfBcwmLmMWJFRGMOyjnh2TOPzidOvydMPCcMqEqLi47k3Wa6blPfs5uCw6QS
-1rDi+vvLiZ0TZQ8xsbr2pY+oSHkz6jVxgXNLe9igZTuzIqAXPYBwlzU3lqEzXvJxemiPYudCFWOL
-6Aiu6Q6qsfY4DE4ghCunTtFZUyIdCNSqotAbpjeyn31GtN0F5KAMEALfCR0thL7pprGDPBWH48Pe
-aPLeuo1IxaRkvEDF+4wyTcSxa9xy7+2bP77uvfj427/9jAPERw+L3d1ht55++Pj1X//zc58QIAT7
-+2j+JFUA5AaQoHIJKMcAt+YwY2ilfBy9jYgn031XeHpLRHtd/J3SWn9LWa1DOb/xCe6vuuF/Do6v
-z7j8vcxSZbvkKeYNrFxxgn6GZoZHI3m2W2wfb+WpUx82i1JdL9Id2grT4q+Wr7wbEH1s+HPTrOQT
-XCTfDSlWGtuh2zTRHl2ZIXYy2n/aU0fLAyRPh24u6H71Ca72wtCQwt95rC64jcFHfCmzO2zG1xt7
-gnG9FBAseb1xH7aHDx+KProfNfiADAjF0L0Mbr84wtMycUvHS4VAOvtNSux67EDgmsklY8WM1oQf
-CdVoK4WHbTnv0IGS1bqCG5Dv5n7iIheqnMVZhLs1CdEOofU7N0XXzSXMjftXjS4H7l+JMiDVOHwT
-GV9HuaS+jQLxY92UWO2YJeUfSAsti/sw3J0myTeVc9c70oqcQxKNjtEAju2BADvMMno8z7EfL1A5
-s1RqPen1em8+CSRA8XaJzNysEdZHcAscSQyuAefi9Ya0JGlMu752nyKJlugU6eNU+O427LytEa2R
-+EzTG7ynxC8Pc9cVX4KXYfQ3dU4v3C92nJlnQinJ6FlNxjj+hZApirun+SSlM4twz1euSp/wEUnu
-3XEMJ5jAzb72CyuYdGZvcoLRQ+baYy/WJRy8tXjKr5a7O3tqywBNv5v+c9GCNJ1OukKIVsQMcoUa
-cGdGiCdYbEYHKOsiuXGN4J0pbMu3jaa1bAdsWFXYzW3NJmdQufv5yzu++DEz8ufHdwSNGd8FnRfd
-rE1badKNkHeA1j7aVfitQxvYOGsH6xcU4rLoDDCIXw/9NpBYQ8COj+gpq7iN+JqQDSMIa8OleQbt
-6hUEu/Speznrhf4s8h9n4HqmyLebZc3y2Ue/wd7hHlWRKfc4TrYOwW2eFO/d15vkd8mUDNkpKwz0
-Pq4RXYTRD7X8zvlumx46my9TpFc8PSphXeXlS62bGnReiX3r1CRQ0Fego5+21AZeVT1kjLOxXbaL
-i0gdrAlmyHACjwyhMAc/3bvADNK+mZL+cTY3w85CokryPDWCZFpGm1TLJuRpSNQJn9x43eAZF9ha
-RExlY/psthjv4xtFoovm4Xsy07KfAhjnsWmfG1eiec7kQHmQFqdGrjtAEtIMeaL4QpMdWi/8nNUI
-q/fXT9fz69Hw5hVoge7r3H0ajy5/0aYQhqYyyfQw5C+U8Cphi+6YlPwiG6DT26+3PTSqTJf5G1SZ
-IJ+Kz32R1azhV/mSRnYhtL3Uj5Z+tptu7lYnVp8KLZ7FAVLnWVygqGlnBGm5gxmkyP+JIWxzuet7
-+B2u0UzBU7yRLd3CHyxZkEeoXp5N1AzmOYUKnGAW1cogzu9BG2E7szidaAdGPb9lPoNptuDFvWfk
-VtC6PzgZP/mXa+rfny1TeXwxefnhob32rDGK7fhzx7h5nC9+ikGyg4AeZLO70+hgol8hhHMwzmIO
-ChBf0TkmUrgy/qhsgIDCWYfI1XJcJoHd2DCYfUrcy7ePQb9aLsFbApK7iHEJQabhOXyH1unacljG
-QfqKlPbwusyq2sQtzPIZz9uUD5JqxEqdO56v4zfiU6LR19DyO7sSelC0i2LhOKr5T/sS4/unFuwF
-o+llogkQfMjuOvGy1L7t96rPm8gjISJHkZ+lCMwWbmuk1JFU+H+nT3XWSmKsyuc6mC+O0EPcQSZ0
-4Q/95sIVmzBJwIZ1wjS17cQJXDBa7xZLx+Tu3fHHzMGLj+PD6L9MgSqn
-"""
-
-import os
-import sys
-import base64
-import zlib
-import tempfile
-import shutil
-
-
-def unpack(sources):
-    temp_dir = tempfile.mkdtemp('-scratchdir', 'unpacker-')
-    for package, content in sources.items():
-        filepath = package.split(".")
-        dirpath = os.sep.join(filepath[:-1])
-        packagedir = os.path.join(temp_dir, dirpath)
-        if not os.path.isdir(packagedir):
-            os.makedirs(packagedir)
-        mod = open(os.path.join(packagedir, "%s.py" % filepath[-1]), 'wb')
-        try:
-            mod.write(content.encode("ascii"))
-        finally:
-            mod.close()
-    return temp_dir
-
-if __name__ == "__main__":
-    if sys.version_info >= (3, 0):
-        exec("def do_exec(co, loc): exec(co, loc)\n")
-        import pickle
-        sources = sources.encode("ascii") # ensure bytes
-        sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
-    else:
-        import cPickle as pickle
-        exec("def do_exec(co, loc): exec co in loc\n")
-        sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
-
-    try:
-        temp_dir = unpack(sources)
-        sys.path.insert(0, temp_dir)
-
-        entry = """
-import sys
-try:
-    import setuptools
-    import pkg_resources
-except ImportError:
-    raise SystemExit("An error occured while trying to run %s. Make sure "
-                     "you have setuptools or distribute installed." % __file__)
-import pip
-pip.bootstrap()
-"""
-        do_exec(entry, locals())
-    finally:
-        shutil.rmtree(temp_dir)
diff --git a/vendor/pip-1.2.1/contrib/packager/__init__.py b/vendor/pip-1.2.1/contrib/packager/__init__.py
deleted file mode 100644
index 96068312a732e1ac53d67ed19ea3a05cc7aaaaac..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/contrib/packager/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Port of Ronny Pfannschmidt's genscript package
-# https://bitbucket.org/RonnyPfannschmidt/genscript
-
-import sys
-import pickle
-import zlib
-import base64
-import os
-import fnmatch
-
-
-def find_toplevel(name):
-    for syspath in sys.path:
-        lib = os.path.join(syspath, name)
-        if os.path.isdir(lib):
-            return lib
-        mod = lib + '.py'
-        if os.path.isfile(mod):
-            return mod
-    raise LookupError(name)
-
-
-def pkgname(toplevel, rootpath, path):
-    parts = path.split(os.sep)[len(rootpath.split(os.sep)):]
-    return '.'.join([toplevel] + [os.path.splitext(x)[0] for x in parts])
-
-
-def pkg_to_mapping(name):
-    toplevel = find_toplevel(name)
-    if os.path.isfile(toplevel):
-        return {name: open(toplevel).read()}
-
-    name2src = {}
-    for root, dirs, files in os.walk(toplevel):
-        for pyfile in files:
-            if fnmatch.fnmatch(pyfile, '*.py'):
-                pkg = pkgname(name, toplevel, os.path.join(root, pyfile))
-                f = open(os.path.join(root, pyfile))
-                try:
-                    name2src[pkg] = f.read()
-                finally:
-                    f.close()
-    return name2src
-
-
-def compress_mapping(mapping):
-    data = pickle.dumps(mapping, 2)
-    data = zlib.compress(data, 9)
-    data = base64.encodestring(data)
-    data = data.decode('ascii')
-    return data
-
-
-def compress_packages(names):
-    mapping = {}
-    for name in names:
-        mapping.update(pkg_to_mapping(name))
-    return compress_mapping(mapping)
-
-
-def generate_script(entry, packages):
-    data = compress_packages(packages)
-    tmpl = open(os.path.join(os.path.dirname(__file__), 'template.py'))
-    exe = tmpl.read()
-    tmpl.close()
-    exe = exe.replace('@SOURCES@', data)
-    exe = exe.replace('@ENTRY@', entry)
-    return exe
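For orientation, a minimal, hypothetical sketch of how this packager module could be driven, assuming the ``contrib`` directory is on ``sys.path`` so it imports as ``packager``; the entry string and output filename are illustrative and not part of the original file::

    # Hypothetical driver for the packager module above (names are illustrative).
    from packager import generate_script

    entry = "import pip\npip.bootstrap()\n"       # code the generated script should run
    script = generate_script(entry, ["pip"])      # embeds pip's sources into template.py
    with open("pip-bootstrap.py", "w") as out:    # output filename is an assumption
        out.write(script)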
diff --git a/vendor/pip-1.2.1/contrib/packager/template.py b/vendor/pip-1.2.1/contrib/packager/template.py
deleted file mode 100644
index 40695893656481ed7c43bcfef0000eda2d41aba0..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/contrib/packager/template.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#! /usr/bin/env python
-
-sources = """
-@SOURCES@"""
-
-import os
-import sys
-import base64
-import zlib
-import tempfile
-import shutil
-
-
-def unpack(sources):
-    temp_dir = tempfile.mkdtemp('-scratchdir', 'unpacker-')
-    for package, content in sources.items():
-        filepath = package.split(".")
-        dirpath = os.sep.join(filepath[:-1])
-        packagedir = os.path.join(temp_dir, dirpath)
-        if not os.path.isdir(packagedir):
-            os.makedirs(packagedir)
-        mod = open(os.path.join(packagedir, "%s.py" % filepath[-1]), 'wb')
-        try:
-            mod.write(content.encode("ascii"))
-        finally:
-            mod.close()
-    return temp_dir
-
-
-if __name__ == "__main__":
-    if sys.version_info >= (3, 0):
-        exec("def do_exec(co, loc): exec(co, loc)\n")
-        import pickle
-        sources = sources.encode("ascii") # ensure bytes
-        sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
-    else:
-        import cPickle as pickle
-        exec("def do_exec(co, loc): exec co in loc\n")
-        sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
-
-    try:
-        temp_dir = unpack(sources)
-        sys.path.insert(0, temp_dir)
-
-        entry = """@ENTRY@"""
-        do_exec(entry, locals())
-    finally:
-        shutil.rmtree(temp_dir)
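As a reading aid (not in the original file): the ``@SOURCES@`` placeholder above is filled with the output of ``compress_mapping`` from the packager module, and the ``__main__`` block reverses that pipeline. A rough sketch of the round trip, assuming Python 3, where ``base64.encodebytes``/``decodebytes`` replace the ``encodestring``/``decodestring`` names used above::

    import base64
    import pickle
    import zlib

    def encode_sources(mapping):
        # What compress_mapping does: pickle, compress, then base64-encode.
        return base64.encodebytes(zlib.compress(pickle.dumps(mapping, 2), 9)).decode("ascii")

    def decode_sources(blob):
        # What the template's __main__ block does, in reverse order.
        return pickle.loads(zlib.decompress(base64.decodebytes(blob.encode("ascii"))))

    assert decode_sources(encode_sources({"pkg.mod": "x = 1\n"})) == {"pkg.mod": "x = 1\n"}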
diff --git a/vendor/pip-1.2.1/docs/Makefile b/vendor/pip-1.2.1/docs/Makefile
deleted file mode 100644
index e4de9f847c482887a61e83152424d25e8f1b435d..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/Makefile
+++ /dev/null
@@ -1,130 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = _build
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-
-clean:
-	-rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-compressor.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-compressor.qhc"
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/django-compressor"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-compressor"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	make -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/vendor/pip-1.2.1/docs/_static/launch-jnlp-slave.JPG b/vendor/pip-1.2.1/docs/_static/launch-jnlp-slave.JPG
deleted file mode 100644
index 01b91e7f76d66650f761eb9ed5babaa2b2118562..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1/docs/_static/launch-jnlp-slave.JPG and /dev/null differ
diff --git a/vendor/pip-1.2.1/docs/_static/slave-launch-icon.png b/vendor/pip-1.2.1/docs/_static/slave-launch-icon.png
deleted file mode 100644
index 3af338c90fe15ec99114509af9db8741711c0c9e..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1/docs/_static/slave-launch-icon.png and /dev/null differ
diff --git a/vendor/pip-1.2.1/docs/_theme/nature/static/nature.css_t b/vendor/pip-1.2.1/docs/_theme/nature/static/nature.css_t
deleted file mode 100644
index c144c2259bf875b2cb015bc3126f0e3decb34d7c..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/_theme/nature/static/nature.css_t
+++ /dev/null
@@ -1,237 +0,0 @@
-/**
- * Sphinx stylesheet -- default theme
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- */
- 
-@import url("basic.css");
- 
-/* -- page layout ----------------------------------------------------------- */
- 
-body {
-    font-family: Arial, sans-serif;
-    font-size: 100%;
-    background-color: #111111;
-    color: #555555;
-    margin: 0;
-    padding: 0;
-}
-
-div.documentwrapper {
-    float: left;
-    width: 100%;
-}
-
-div.bodywrapper {
-    margin: 0 0 0 300px;
-}
-
-hr{
-    border: 1px solid #B1B4B6;
-}
- 
-div.document {
-    background-color: #fafafa;
-}
- 
-div.body {
-    background-color: #ffffff;
-    color: #3E4349;
-    padding: 1em 30px 30px 30px;
-    font-size: 0.9em;
-}
- 
-div.footer {
-    color: #555;
-    width: 100%;
-    padding: 13px 0;
-    text-align: center;
-    font-size: 75%;
-}
- 
-div.footer a {
-    color: #444444;
-}
- 
-div.related {
-    background-color: #6BA81E;
-    line-height: 36px;
-    color: #ffffff;
-    text-shadow: 0px 1px 0 #444444;
-    font-size: 1.1em;
-}
- 
-div.related a {
-    color: #E2F3CC;
-}
-
-div.related .right {
-    font-size: 0.9em;
-}
-
-div.sphinxsidebar {
-    font-size: 0.9em;
-    line-height: 1.5em;
-    width: 300px
-}
-
-div.sphinxsidebarwrapper{
-    padding: 20px 0;
-}
- 
-div.sphinxsidebar h3,
-div.sphinxsidebar h4 {
-    font-family: Arial, sans-serif;
-    color: #222222;
-    font-size: 1.2em;
-    font-weight: bold;
-    margin: 0;
-    padding: 5px 10px;
-    text-shadow: 1px 1px 0 white
-}
-
-div.sphinxsidebar h3 a {
-    color: #444444;
-}
-
-div.sphinxsidebar p {
-    color: #888888;
-    padding: 5px 20px;
-    margin: 0.5em 0px;
-}
- 
-div.sphinxsidebar p.topless {
-}
- 
-div.sphinxsidebar ul {
-    margin: 10px 10px 10px 20px;
-    padding: 0;
-    color: #000000;
-}
- 
-div.sphinxsidebar a {
-    color: #444444;
-}
-
-div.sphinxsidebar a:hover {
-    color: #E32E00;
-}
-
-div.sphinxsidebar input {
-    border: 1px solid #cccccc;
-    font-family: sans-serif;
-    font-size: 1.1em;
-    padding: 0.15em 0.3em;
-}
-
-div.sphinxsidebar input[type=text]{
-    margin-left: 20px;
-}
- 
-/* -- body styles ----------------------------------------------------------- */
- 
-a {
-    color: #005B81;
-    text-decoration: none;
-}
- 
-a:hover {
-    color: #E32E00;
-}
- 
-div.body h1,
-div.body h2,
-div.body h3,
-div.body h4,
-div.body h5,
-div.body h6 {
-    font-family: Arial, sans-serif;
-    font-weight: normal;
-    color: #212224;
-    margin: 30px 0px 10px 0px;
-    padding: 5px 0 5px 0px;
-    text-shadow: 0px 1px 0 white;
-    border-bottom: 1px solid #C8D5E3;
-}
- 
-div.body h1 { margin-top: 0; font-size: 200%; }
-div.body h2 { font-size: 150%; }
-div.body h3 { font-size: 120%; }
-div.body h4 { font-size: 110%; }
-div.body h5 { font-size: 100%; }
-div.body h6 { font-size: 100%; }
- 
-a.headerlink {
-    color: #c60f0f;
-    font-size: 0.8em;
-    padding: 0 4px 0 4px;
-    text-decoration: none;
-}
- 
-a.headerlink:hover {
-    background-color: #c60f0f;
-    color: white;
-}
- 
-div.body p, div.body dd, div.body li {
-    line-height: 1.8em;
-}
- 
-div.admonition p.admonition-title + p {
-    display: inline;
-}
-
-div.highlight{
-    background-color: white;
-}
-
-div.note {
-    background-color: #eeeeee;
-    border: 1px solid #cccccc;
-}
- 
-div.seealso {
-    background-color: #ffffcc;
-    border: 1px solid #ffff66;
-}
- 
-div.topic {
-    background-color: #fafafa;
-    border-width: 0;
-}
- 
-div.warning {
-    background-color: #ffe4e4;
-    border: 1px solid #ff6666;
-}
- 
-p.admonition-title {
-    display: inline;
-}
- 
-p.admonition-title:after {
-    content: ":";
-}
- 
-pre {
-    padding: 10px;
-    background-color: #fafafa;
-    color: #222222;
-    line-height: 1.5em;
-    font-size: 1.1em;
-    margin: 1.5em 0 1.5em 0;
-    -webkit-box-shadow: 0px 0px 4px #d8d8d8;
-    -moz-box-shadow: 0px 0px 4px #d8d8d8;
-    box-shadow: 0px 0px 4px #d8d8d8;
-}
- 
-tt {
-    color: #222222;
-    padding: 1px 2px;
-    font-size: 1.2em;
-    font-family: monospace;
-}
-
-#table-of-contents ul {
-    padding-left: 2em;
-}
-
diff --git a/vendor/pip-1.2.1/docs/_theme/nature/static/pygments.css b/vendor/pip-1.2.1/docs/_theme/nature/static/pygments.css
deleted file mode 100644
index 652b76128b6a174f3407a50fff8735896f47d863..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/_theme/nature/static/pygments.css
+++ /dev/null
@@ -1,54 +0,0 @@
-.c { color: #999988; font-style: italic } /* Comment */
-.k { font-weight: bold } /* Keyword */
-.o { font-weight: bold } /* Operator */
-.cm { color: #999988; font-style: italic } /* Comment.Multiline */
-.cp { color: #999999; font-weight: bold } /* Comment.preproc */
-.c1 { color: #999988; font-style: italic } /* Comment.Single */
-.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
-.ge { font-style: italic } /* Generic.Emph */
-.gr { color: #aa0000 } /* Generic.Error */
-.gh { color: #999999 } /* Generic.Heading */
-.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
-.go { color: #111 } /* Generic.Output */
-.gp { color: #555555 } /* Generic.Prompt */
-.gs { font-weight: bold } /* Generic.Strong */
-.gu { color: #aaaaaa } /* Generic.Subheading */
-.gt { color: #aa0000 } /* Generic.Traceback */
-.kc { font-weight: bold } /* Keyword.Constant */
-.kd { font-weight: bold } /* Keyword.Declaration */
-.kp { font-weight: bold } /* Keyword.Pseudo */
-.kr { font-weight: bold } /* Keyword.Reserved */
-.kt { color: #445588; font-weight: bold } /* Keyword.Type */
-.m { color: #009999 } /* Literal.Number */
-.s { color: #bb8844 } /* Literal.String */
-.na { color: #008080 } /* Name.Attribute */
-.nb { color: #999999 } /* Name.Builtin */
-.nc { color: #445588; font-weight: bold } /* Name.Class */
-.no { color: #ff99ff } /* Name.Constant */
-.ni { color: #800080 } /* Name.Entity */
-.ne { color: #990000; font-weight: bold } /* Name.Exception */
-.nf { color: #990000; font-weight: bold } /* Name.Function */
-.nn { color: #555555 } /* Name.Namespace */
-.nt { color: #000080 } /* Name.Tag */
-.nv { color: purple } /* Name.Variable */
-.ow { font-weight: bold } /* Operator.Word */
-.mf { color: #009999 } /* Literal.Number.Float */
-.mh { color: #009999 } /* Literal.Number.Hex */
-.mi { color: #009999 } /* Literal.Number.Integer */
-.mo { color: #009999 } /* Literal.Number.Oct */
-.sb { color: #bb8844 } /* Literal.String.Backtick */
-.sc { color: #bb8844 } /* Literal.String.Char */
-.sd { color: #bb8844 } /* Literal.String.Doc */
-.s2 { color: #bb8844 } /* Literal.String.Double */
-.se { color: #bb8844 } /* Literal.String.Escape */
-.sh { color: #bb8844 } /* Literal.String.Heredoc */
-.si { color: #bb8844 } /* Literal.String.Interpol */
-.sx { color: #bb8844 } /* Literal.String.Other */
-.sr { color: #808000 } /* Literal.String.Regex */
-.s1 { color: #bb8844 } /* Literal.String.Single */
-.ss { color: #bb8844 } /* Literal.String.Symbol */
-.bp { color: #999999 } /* Name.Builtin.Pseudo */
-.vc { color: #ff99ff } /* Name.Variable.Class */
-.vg { color: #ff99ff } /* Name.Variable.Global */
-.vi { color: #ff99ff } /* Name.Variable.Instance */
-.il { color: #009999 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/vendor/pip-1.2.1/docs/_theme/nature/theme.conf b/vendor/pip-1.2.1/docs/_theme/nature/theme.conf
deleted file mode 100644
index 1cc40044646bb73870088ddc88543c58a3ca083e..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/_theme/nature/theme.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[theme]
-inherit = basic
-stylesheet = nature.css
-pygments_style = tango
diff --git a/vendor/pip-1.2.1/docs/ci-server-step-by-step.txt b/vendor/pip-1.2.1/docs/ci-server-step-by-step.txt
deleted file mode 100644
index 0d0f66296dd4a70a70a5f88fe181f66aadd01a5c..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/ci-server-step-by-step.txt
+++ /dev/null
@@ -1,252 +0,0 @@
-==========================================
-Hudson CI server installation step by step
-==========================================
-
-Why Hudson
-==========
-
-One of the advantages of `Hudson <http://hudson-ci.org/>`_ over
-`Buildbot <http://buildbot.net/>`_, for instance, is that
-almost everything is done via its web interface, so anyone can
-manage the continuous integration server easily.
-Another advantage over the other alternatives is that Hudson has many
-`available plugins <http://wiki.hudson-ci.org/display/HUDSON/Plugins>`_,
-so you don't need to write your own.
-
-Hudson runs on Java, so the next step is to install Java and its dependencies.
-
-
-Java and its dependencies
-=========================
-
-You can install all Java-related packages this way (not recommended)::
-
-    $ [sudo] apt-get install ca-certificates-java daemon default-jre\
-    > default-jre-headless gcj-4.4-base gcj-4.4-jre-headless gcj-4.4-jre-lib\
-    > icedtea-6-jre-cacao java-common libaccess-bridge-java\
-    > libaccess-bridge-java-jni libgcj-common libgcj10 libjline-java \
-    > openjdk-6-jre openjdk-6-jre-headless openjdk-6-jre-lib\
-    > rhino tzdata-java tzdata
-    
-
-Or try installing with the information in the next section and
-if you have problems, run::
-    
-    $ [sudo] apt-get install -f
-
-
-
-
-Installation
-============
-
-The recommended installation from the `Hudson site for Debian users
-<http://hudson-ci.org/debian/>`_ is to install through the `.deb` package.
-The advantages are: you can automatically upgrade Hudson via apt and
-use the `service` or `/etc/init.d` resource to start the daemon on boot.
-
-To install Hudson as they recommend, do the following:
-
-* Add the key to your system::
-    
-    $ wget -O /tmp/key http://hudson-ci.org/debian/hudson-ci.org.key
-    $ [sudo] apt-key add /tmp/key
-
-
-* Then install Hudson::
-
-    $ wget -O /tmp/hudson.deb http://hudson-ci.org/latest/debian/hudson.deb
-    $ [sudo] dpkg --install /tmp/hudson.deb
-
-
-When you reboot the computer, the web daemon will be started at
-http://localhost:8080. If you don't want to reboot the computer, run::
-
-    $ [sudo] service hudson start
-    or
-    $ [sudo] /etc/init.d/hudson start
-
-
-
-
-Apache
-======
-
-This is not necessary for all users, but if you want to set up Apache to run
-the web interface, you should follow this tutorial:
-http://wiki.hudson-ci.org/display/HUDSON/Running+Hudson+behind+Apache
-
-
-Plugins
-=======
-
-Installing plugins in Hudson is very easy.
-Just click *Manage Hudson*, then *Manage Plugins*.
-The *Updates* tab shows all available updates to installed plugins.
-But what we need now is to install plugins, so we must go to the
-*Available* tab, check the plugins we want installed, and then press the
-*Install* button at the bottom of the page.
-
-The Hudson server hosted at http://ci.cloudsilverlining.org has the following
-plugins installed for the pip project:
-
-* Hudson IRC Plugin
-* Green Balls
-* Hudson Continuous Integration game
-* Hudson instant-messaging plugin
-* Hudson Jabber notifier plugin
-* Hudson Email Extension Plugin
-* Hudson Mercurial plugin
-
-
-Creating a Job
-==============
-
-Before Creating a Job for pip
------------------------------
-
-Hudson manages "jobs". Jobs are complete builds to Hudson. For instance,
-you want to build pip project and run its tests with nosetests.
-This section  assumes you have all needed `dependencies installed`_.
-
-You need to set up some configuration in Hudson before creating your first job.
-Go to the Hudson home page, "Manage Hudson", then "Configure System".
-
-In the Mercurial section, fill in the "Add Mercurial" section with:
-
-* Name: `hg`
-* Installation directory: `/usr`
-* Executable: `INSTALLATION/bin/hg`
-
-In the Shell section, fill in the shell executable with `/bin/bash`.
-
-Then press the "Save" button in the bottom of the page.
-
-
-    
-    
-Configuring a Job Step by Step
-------------------------------
-
-* Go to the home of the Hudson web interface
-* Click *New Job*
-* Pick a name for the job - pip, for instance
-* Mark the option "Build a free-style software project"
-* Press "OK" button
-
-Now you are redirected to the job's configuration page. Here you will
-tell Hudson how to build your job. The most important
-steps are listed below (assuming the Mercurial plugin is installed):
-
-* Check "Mercurial" in Source Control Management section
-* Fill in the repository URL with **https://github.com/pypa/pip**
-* Mark "Build periodically" in *Build Triggers* section
-* Add "0 0 \* \* \*" (without quotes) to tell hudson you want to
-  run your build everyday at midnight
-* Click "Add Build Step" in the *Build* section and pick "Execute Shell"
-
-This box will contain all the code you want your build to run. To run pip's
-tests we need to install pip's test dependencies and run nosetests.
-Add the following lines to the box (this assumes you have virtualenv
-installed in your system Python)::
-
-    python -mvirtualenv --no-site-packages pip_virtualenv
-    source pip_virtualenv/bin/activate
-    cd $WORKSPACE/..
-    easy_install -U pip
-    cd $WORKSPACE
-    pip install virtualenv scripttest nose
-    nosetests tests -v
-    
-The *$WORKSPACE* environment variable is the current build workspace;
-in the case above it is the path of the cloned repository. The `cd` commands
-are a workaround for a pip bug.
-
-The process executed above means:
-
-* create a virtualenv called **pip_virtualenv** without shared site-packages
-* activate the environment
-* update the system's pip
-* install pip's test dependencies
-* run nosetests in the current directory
-
-
-Press the "Save" button and in the next page test if the build is correct
-by clicking "Build now" button.
-
-The left sidebar shows completed builds and the currently running one (if any).
-Click the top build, then "Console Output". Now you can
-watch what Hudson is doing to build your job and see the results.
-
-
-
-
-Notes
-=====
-
-If you change anything in your system environment, like updating
-your environment configuration files, and realize Hudson
-didn't catch your changes, try restarting it::
-
-    $ [sudo] service hudson stop
-    $ [sudo] service hudson start
-    
-If, when you run the `start` command, you get an error telling you the port
-is in use, wait about 2 or 3 seconds and try the command again - that is
-roughly how long releasing the port may take.
-
-What is covered here is the basic knowledge needed to start setting up and
-using a Hudson server; the goal is not to teach everything about Hudson or
-every detail of its setup.
-
-There is a running Hudson server for the pip project here:
-http://ci.cloudsilverlining.org/view/pip
-
-
-Creating a Windows Slave to Run Jobs
-====================================
-
-After starting Hudson on Linux, start your Windows machine and access the
-Hudson web interface.
-
-Adding a Windows Node to Hudson CI Server
------------------------------------------
-
-Click "Manage Hudson", "Manage Nodes", "New Node". The **Node name** value
-must be the Windows machine domain name - mywindowsslave.myhost.com, for
-instance.
-
-The "Launch method" should be **Launch slave agents via JNLP**
-
-.. image:: _static/launch-jnlp-slave.JPG
-   :width: 500px
-   :target: _static/launch-jnlp-slave.JPG
-
-Then press the **Add** button, and in the next page click
-the **Launch** icon.
-
-.. image:: _static/slave-launch-icon.png
-   :width: 500px
-   :target: _static/launch-jnlp-slave.JPG
-
-Now you are able to create jobs tied to this Windows machine.
-
-
-Creating Tied Jobs
-------------------
-
-The process of creating a job is almost the same as the list in the
-`Creating a Job`_ section; the only difference is that you need
-to mark the **Tie this project to a node** option and select which
-node you want to run that build on.
-
-There is a difference in build commands that rely on variables. On Linux
-they all start with `$`, like `$WORKSPACE`.
-On Windows they are enclosed by `%`, like `%WORKSPACE%`. And everything
-you were doing that depends on Bash will need to change to DOS
-prompt commands and batch files.
-
-
-
-.. _dependencies installed: running-tests.html#system-requirements
-.. _creating a job: #creating-a-job
diff --git a/vendor/pip-1.2.1/docs/conf.py b/vendor/pip-1.2.1/docs/conf.py
deleted file mode 100644
index 7dd29dc87fbad89bf2f71c0de25930387b650d6f..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/conf.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# pip documentation build configuration file, created by
-# sphinx-quickstart on Tue Apr 22 22:08:49 2008
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys, os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
-#sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
-
-# -- General configuration -----------------------------------------------------
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-#extensions = ['sphinx.ext.autodoc']
-extensions = []
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.txt'
-
-# The encoding of source files.
-#source_encoding = 'utf-8'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = 'pip'
-copyright = '2008-2011, The pip developers'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-release = "1.1"
-version = '.'.join(release.split('.')[:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-today_fmt = '%B %d, %Y'
-
-# List of documents that shouldn't be included in the build.
-#unused_docs = []
-
-# List of directories, relative to source directory, that shouldn't be searched
-# for source files.
-exclude_trees = ['build']
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  Major themes that come with
-# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = 'nature'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['_theme']
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-#html_title = None
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = '_static/piplogo.png'
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = 'favicon.png'
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-html_use_modindex = False
-
-# If false, no index is generated.
-html_use_index = False
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-html_show_sourcelink = False
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = ''
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'pipdocs'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-# The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
-
-# The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-  ('index', 'pip.tex', u'pip Documentation',
-   u'The pip developers', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_use_modindex = True
diff --git a/vendor/pip-1.2.1/docs/configuration.txt b/vendor/pip-1.2.1/docs/configuration.txt
deleted file mode 100644
index 35d0a347f41f9ba62ab8949a92db7941ec865d5c..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/configuration.txt
+++ /dev/null
@@ -1,141 +0,0 @@
-Configuration
-=============
-
-pip allows you to set its default options by using the following facilities,
-listed in order of precedence (each item overrides those below it):
-
-1. Command line options
-
-2. `Environment variables`_
-
-3. `Config files`_
-
-   1. Command specific section, e.g. ``[install]``
-   2. Global section ``[global]``
-
-That means pip will check each of those configuration sources and set the
-defaults appropriately.
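A minimal sketch of that precedence in Python, as an illustration rather than pip's actual implementation; the option and variable names are only examples::

    import os

    def effective_value(name, cli_value, config_file_value):
        # Command line beats environment, which beats the config file.
        if cli_value is not None:
            return cli_value
        env_value = os.environ.get("PIP_" + name.upper().replace("-", "_"))
        if env_value is not None:
            return env_value
        return config_file_value

    # e.g. effective_value("default-timeout", None, "60") -> "60" unless
    # PIP_DEFAULT_TIMEOUT or the command line option is set.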
-
-Examples
---------
-
-- ``--host=foo`` overrides ``PIP_HOST=foo``
-- ``PIP_HOST=foo`` overrides a config file with ``[global] host = foo``
-- A command specific section in the config file ``[<command>] host = bar``
-  overrides the option with same name in the ``[global]`` config file section
-- Environment variables override config files
-
-Config files
-------------
-
-pip allows you to set all command line option defaults in a standard ini
-style config file.
-
-The names of the settings are derived from the long command line option, e.g.
-if you want to use a different package index (``--index-url``) and set the
-HTTP timeout (``--default-timeout``) to 60 seconds, your config file would
-look like this:
-
-.. code-block:: ini
-
-    [global]
-    timeout = 60
-    index-url = http://download.zope.org/ppix
-
-Each subcommand can optionally be configured in its own section, which
-overrides any global setting of the same name. For example, to decrease the
-``timeout`` to ``10`` seconds when running the `freeze`
-(`Freezing Requirements <./#freezing-requirements>`_) command while keeping
-``60`` seconds for all other commands:
-
-.. code-block:: ini
-
-    [global]
-    timeout = 60
-    
-    [freeze]
-    timeout = 10
-
-Boolean options like ``--ignore-installed`` or ``--no-dependencies`` can be
-set like this:
-
-.. code-block:: ini
-
-    [install]
-    ignore-installed = true
-    no-dependencies = yes
-
-Appending options like ``--find-links`` can be written on multiple lines:
-
-.. code-block:: ini
-
-    [global]
-    find-links =
-        http://download.example.com
-
-    [install]
-    find-links =
-        http://mirror1.example.com
-        http://mirror2.example.com
-
-Location
-********
-
-The names and locations of the configuration files vary slightly across
-platforms.
-
-On Unix and Mac OS X the configuration file is: :file:`$HOME/.pip/pip.conf`
-
-And on Windows, the configuration file is: :file:`%HOME%\\pip\\pip.ini`
-
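-For example, on Unix you could create a minimal config file from the shell
-(a sketch assuming a POSIX shell; the ``timeout`` value is only
-illustrative)::
-
-    $ mkdir -p ~/.pip
-    $ printf '[global]\ntimeout = 60\n' > ~/.pip/pip.conf
-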
-Environment variables
------------------------
-
-Just like with `config files`_, each of pip's command line options
-(long version, e.g. ``--find-links``) can be set through an environment
-variable with the name format ``PIP_<UPPER_NAME>``: the name of the command
-line option is capitalized and dashes (``-``) are replaced with underscores
-(``_``).
-
-For example, to redefine the default timeout you can also set an
-environment variable::
-
-    export PIP_DEFAULT_TIMEOUT=60
-    pip install ipython
-
-Which is the same as passing the option to pip directly::
-
-    pip --default-timeout=60 install ipython
-
-This also works for appending command line options, like ``--find-links``.
-Just separate the passed values with a space, e.g.::
-
-    export PIP_FIND_LINKS="http://mirror1.example.com http://mirror2.example.com"
-
-is the same as calling::
-
-    pip install --find-links=http://mirror1.example.com --find-links=http://mirror2.example.com
-
-Configuration options
----------------------
-
-Mirror support
-**************
-
-The `PyPI mirroring infrastructure <http://pypi.python.org/mirrors>`_ as
-described in `PEP 381 <http://www.python.org/dev/peps/pep-0381/>`_ can be
-used by passing the ``--use-mirrors`` option to the install command.
-Alternatively, you can use the other ways to configure pip, e.g.::
-
-    $ export PIP_USE_MIRRORS=true
-
-If enabled, pip will automatically query the DNS entry of the mirror index URL
-to find the list of mirrors to use. If you want to override this list, use the
-``--mirrors`` option of the install command, or add the mirrors to your pip
-configuration file::
-
-    [install]
-    use-mirrors = true
-    mirrors =
-        http://d.pypi.python.org
-        http://b.pypi.python.org
diff --git a/vendor/pip-1.2.1/docs/contributing.txt b/vendor/pip-1.2.1/docs/contributing.txt
deleted file mode 100644
index c3d0c13567225d8be1f571c813edb82be1203654..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/contributing.txt
+++ /dev/null
@@ -1,167 +0,0 @@
-=================
-How to contribute
-=================
-
-
-All kinds of contributions are welcome - code, tests, documentation,
-bug reports, ideas, etc.
-
-
-Release Schedule
-================
-
-Minor releases of pip (e.g. 1.1, 1.2, 1.3...) occur every four months
-(beginning with the release of pip 1.0 on April 4, 2011). Two weeks before a
-scheduled release, a new branch ``release/X.Y`` is created for release testing
-and preparation. This branch is only open to bugfixes.
-
-.. _contributing-with-code:
-
-Contributing with Code
-======================
-
-Forking through Github
-----------------------
-
-First of all, you need to fork the official repository, which is
-https://github.com/pypa/pip.
-
-Log in to Github, go to the `pip repository page
-<https://github.com/pypa/pip>`_, follow the **fork** link, wait for Github
-to copy the repository and then clone your fork, like::
-
-    $ git clone https://github.com/YOUR_USER_NAME/pip
-
-Now you can change whatever you want, commit, and push to your fork. When your
-contribution is done, follow the **pull request** link and send us a request
-explaining what you did and why.
-
-Branches
---------
-
-Pip uses the `git-flow`_ branching model. The default branch on GitHub is
-``develop``, and all development work (new features and bugfixes) should happen
-in that branch. The ``master`` branch is stable, and reflects the last released
-state.
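-
-For example, a bugfix would typically be developed on a branch created from
-``develop`` (a sketch only; the branch name ``bugfix-foo`` is illustrative)::
-
-    $ git checkout develop
-    $ git checkout -b bugfix-foo develop
-    $ # ...edit and commit...
-    $ git push origin bugfix-foo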
-
-.. _git-flow: http://nvie.com/posts/a-successful-git-branching-model/
-
-All tests should pass
----------------------
-
-Almost all changes to pip should be accompanied by automated tests -
-especially ones adding new behavior.
-
-`Nose`_ is used to find and run all tests. Take a look at :doc:`running-tests`
-to see what you need and how you should run the tests.
-
-Before sending us a pull request, please be sure all tests pass.
-
-Supported Python versions
--------------------------
-
-Pip supports Python versions 2.4, 2.5, 2.6, 2.7, 3.1, and 3.2, from a single
-codebase (without use of 2to3 translation). Untested contributions frequently
-break Python 2.4 or 3.x compatibility. Please run the tests on at least 2.4 and
-3.2 and report your results when sending a pull request.
-
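-For example, if several interpreters are installed, the test suite (see
-`Running the Tests`_ below) can be run against each of them in turn; the
-interpreter names here are only illustrative::
-
-    $ python2.4 setup.py test
-    $ python3.2 setup.py test
-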
-Continuous Integration server
------------------------------
-
-We have a continuous integration server running all pip-related tests at
-http://ci.cloudsilverlining.org/view/pip. If you want to have your own, the
-:doc:`ci-server-step-by-step` page explains how to set up a similar Hudson CI
-server.
-
-
-
-Running the Tests
-=================
-
-Pip's tests use some system tools (version control clients), so you need to
-install them first. On Linux::
-
-    sudo apt-get install subversion bzr git-core mercurial
-
-Or download and install `Subversion
-<http://subversion.apache.org/packages.html>`_, `Bazaar
-<http://wiki.bazaar.canonical.com/Download>`_, `Git
-<http://git-scm.com/download>`_ and `Mercurial
-<http://mercurial.selenic.com/downloads/>`_ manually.
-
-
-After all requirements (system and Python) are installed,
-just run the following command::
-
-    $ python setup.py test
-
-Running tests directly with Nose
---------------------------------
-
-If you want to run only a selection of the tests, you'll need to run them
-directly with nose instead. Create a virtualenv, and install required
-packages::
-
-    pip install nose virtualenv scripttest mock
-
-Run nosetests::
-
-    nosetests
-
-Or select just a single test to run::
-
-    cd tests; nosetests test_upgrade.py:test_uninstall_rollback
-
-
-Troubleshooting
----------------
-
-Locale Warnings
-    A misconfigured locale broke some tests when they were run on a Hudson CI
-    server. The problem was not with pip, but with the `locales` configuration:
-    Hudson was not setting the LANG environment variable correctly, so the fix
-    was to change the default language to en_US.UTF-8. The following was done
-    on an Ubuntu Server 9.10 machine::
-
-        $ sudo locale-gen en_US en_US.UTF-8
-        $ sudo dpkg-reconfigure locales
-        $ sudo update-locale LANG=en_US.UTF-8
-
-
-
-Contributing with Tests
-=======================
-
-Pip's test coverage is not as good as we would like, so contributions of
-additional tests are welcome. You can contribute these the same way you would
-contribute any other kind of code: see the :ref:`contributing-with-code`
-section.
-
-
-Contributing with Bug Reports
-=============================
-
-The pip project is hosted on `Github`_ and uses its issue tracker.
-
-If you have found a bug and want to report it, go to the
-`pip issue tracker page`_, click **Create new**, add a descriptive title (so
-we can easily identify what the bug is), and fill in the description box,
-explaining how you encountered the bug, which pip version you were using, and
-which operating system you are on, so we can reproduce the bug and try to fix
-it.
-
-
-
-Contributing with Ideas
-=======================
-
-We are always open to new ideas, and we will enjoy yours. You can send
-enhancement ideas and proposals via the `pip issue tracker page`_, the
-`virtualenv mailing list`_, or the #pip channel on Freenode.
-
-
-
-.. _nose:  http://somethingaboutorange.com/mrl/projects/nose/0.11.3/
-.. _Github: http://github.com/
-.. _pip issue tracker page: https://github.com/pypa/pip/issues
-.. _virtualenv mailing list: http://groups.google.com/group/python-virtualenv/
diff --git a/vendor/pip-1.2.1/docs/glossary.txt b/vendor/pip-1.2.1/docs/glossary.txt
deleted file mode 100644
index c9decea5d7da41158b99c49e04b33b986d319a74..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/glossary.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-========
-Glossary
-========
-
-.. glossary::
-
-    PyPI
-        The `Python Package Index`_, formerly known as the Cheese Shop,
-        is a central catalog of Python packages. By default, when
-        installing packages, `pip` searches for them in PyPI.
-
-        .. _`Python Package Index`: http://pypi.python.org/pypi
diff --git a/vendor/pip-1.2.1/docs/index.txt b/vendor/pip-1.2.1/docs/index.txt
deleted file mode 100644
index 3f117c0ce2b42c65048ee37a90b4d0f4d77a1565..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/index.txt
+++ /dev/null
@@ -1,54 +0,0 @@
-pip
-===
-
-`pip` is a tool for installing and managing Python packages, such as
-those found in the `Python Package Index`_. It's a replacement for
-easy_install_.
-::
-
-    $ pip install simplejson
-    [... progress report ...]
-    Successfully installed simplejson
-
-.. _`Python Package Index`: http://pypi.python.org/pypi
-.. _easy_install: http://peak.telecommunity.com/DevCenter/EasyInstall
-
-Upgrading a package::
-
-    $ pip install --upgrade simplejson
-    [... progress report ...]
-    Successfully installed simplejson
-
-Removing a package::
-
-    $ pip uninstall simplejson
-    Uninstalling simplejson:
-      /home/me/env/lib/python2.7/site-packages/simplejson
-      /home/me/env/lib/python2.7/site-packages/simplejson-2.2.1-py2.7.egg-info
-    Proceed (y/n)? y
-      Successfully uninstalled simplejson
-
-.. comment: split here
-
-.. toctree::
-   :maxdepth: 2
-
-   installing
-   usage
-   requirements
-   configuration
-   other-tools
-   contributing
-   news
-   glossary
-
-.. comment: split here
-
-Community
----------
-
-The homepage for pip is at `pip-installer.org <http://www.pip-installer.org/>`_.
-Bugs can be filed in the `pip issue tracker
-<https://github.com/pypa/pip/issues/>`_.  Discussion happens on the
-`virtualenv email group
-<http://groups.google.com/group/python-virtualenv?hl=en>`_.
diff --git a/vendor/pip-1.2.1/docs/installing.txt b/vendor/pip-1.2.1/docs/installing.txt
deleted file mode 100644
index 5f86c6a4927b62a970c2d433615a9685f6869eff..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/installing.txt
+++ /dev/null
@@ -1,66 +0,0 @@
-Installation instructions
-=========================
-
-The recommended way to use pip is within `virtualenv
-<http://www.virtualenv.org>`_, since every virtualenv has pip installed in it
-automatically. This does not require root access or modify your system Python
-installation. For instance::
-
-    $ curl -O https://raw.github.com/pypa/virtualenv/master/virtualenv.py
-    $ python virtualenv.py my_new_env
-    $ . my_new_env/bin/activate
-    (my_new_env)$ pip install ...
-
-When used in this manner, pip will only affect the active virtual environment.
-If you do want to install pip globally into your Python installation, see the
-instructions below.
-
-Prerequisites
--------------
-
-Prior to installing pip make sure you have either `setuptools
-<http://pypi.python.org/pypi/setuptools>`_ or `distribute
-<http://pypi.python.org/pypi/distribute>`_ installed.  Please consult your
-operating system's package manager or install it manually::
-
-    $ curl http://python-distribute.org/distribute_setup.py | python
-
-.. warning::
-
-    If you are using Python 3.X you **must** use distribute; setuptools doesn't
-    support Python 3.X.
-
-Using the installer
--------------------
-
-Download `get-pip.py <https://raw.github.com/pypa/pip/master/contrib/get-pip.py>`_
-and execute it, using the Python interpreter of your choice::
-
-    $ curl https://raw.github.com/pypa/pip/master/contrib/get-pip.py | python
-
-This may have to be run as root.
-
-Alternative installation procedures
------------------------------------
-
-Using the source distribution
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-You can find the source on `PyPI <http://pypi.python.org/pypi/pip>`_::
-
-    $ curl -O http://pypi.python.org/packages/source/p/pip/pip-1.0.tar.gz
-    $ tar xvfz pip-1.0.tar.gz
-    $ cd pip-1.0
-    $ python setup.py install # may need to be root
-
-Installing the development version
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-First you will need to clone the git repo::
-
-    $ git clone https://github.com/pypa/pip.git
-
-Now we can install from the repo::
-
-    $ cd pip
-    $ python setup.py install # may need to be root
diff --git a/vendor/pip-1.2.1/docs/make.bat b/vendor/pip-1.2.1/docs/make.bat
deleted file mode 100644
index aa5c189fcfb97a3a1735fe7a36184a0bf3c1a248..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/make.bat
+++ /dev/null
@@ -1,170 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
-	set SPHINXBUILD=sphinx-build
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
-	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
-	:help
-	echo.Please use `make ^<target^>` where ^<target^> is one of
-	echo.  html       to make standalone HTML files
-	echo.  dirhtml    to make HTML files named index.html in directories
-	echo.  singlehtml to make a single large HTML file
-	echo.  pickle     to make pickle files
-	echo.  json       to make JSON files
-	echo.  htmlhelp   to make HTML files and a HTML help project
-	echo.  qthelp     to make HTML files and a qthelp project
-	echo.  devhelp    to make HTML files and a Devhelp project
-	echo.  epub       to make an epub
-	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
-	echo.  text       to make text files
-	echo.  man        to make manual pages
-	echo.  changes    to make an overview over all changed/added/deprecated items
-	echo.  linkcheck  to check all external links for integrity
-	echo.  doctest    to run all doctests embedded in the documentation if enabled
-	goto end
-)
-
-if "%1" == "clean" (
-	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
-	del /q /s %BUILDDIR%\*
-	goto end
-)
-
-if "%1" == "html" (
-	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
-	goto end
-)
-
-if "%1" == "dirhtml" (
-	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
-	goto end
-)
-
-if "%1" == "singlehtml" (
-	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
-	goto end
-)
-
-if "%1" == "pickle" (
-	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished; now you can process the pickle files.
-	goto end
-)
-
-if "%1" == "json" (
-	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished; now you can process the JSON files.
-	goto end
-)
-
-if "%1" == "htmlhelp" (
-	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in %BUILDDIR%/htmlhelp.
-	goto end
-)
-
-if "%1" == "qthelp" (
-	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in %BUILDDIR%/qthelp, like this:
-	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pip.qhcp
-	echo.To view the help file:
-	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pip.qhc
-	goto end
-)
-
-if "%1" == "devhelp" (
-	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished.
-	goto end
-)
-
-if "%1" == "epub" (
-	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished. The epub file is in %BUILDDIR%/epub.
-	goto end
-)
-
-if "%1" == "latex" (
-	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
-	goto end
-)
-
-if "%1" == "text" (
-	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished. The text files are in %BUILDDIR%/text.
-	goto end
-)
-
-if "%1" == "man" (
-	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Build finished. The manual pages are in %BUILDDIR%/man.
-	goto end
-)
-
-if "%1" == "changes" (
-	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.The overview file is in %BUILDDIR%/changes.
-	goto end
-)
-
-if "%1" == "linkcheck" (
-	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Link check complete; look for any errors in the above output ^
-or in %BUILDDIR%/linkcheck/output.txt.
-	goto end
-)
-
-if "%1" == "doctest" (
-	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
-	if errorlevel 1 exit /b 1
-	echo.
-	echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
-	goto end
-)
-
-:end
diff --git a/vendor/pip-1.2.1/docs/news.txt b/vendor/pip-1.2.1/docs/news.txt
deleted file mode 100644
index 3bcdb2a2040103fa05b46c2fb28579734bab4a63..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/news.txt
+++ /dev/null
@@ -1,558 +0,0 @@
-====
-News
-====
-
-Changelog
-=========
-
-Next release (1.2) schedule
----------------------------
-
-Beta and final releases planned for the second half of 2012.
-
-1.1 (2012-02-16)
-----------------
-
-* Fixed issue #326 - don't crash when a package's setup.py emits UTF-8 and
-  then fails. Thanks Marc Abramowitz.
-
-* Added ``--target`` option for installing directly to arbitrary directory.
-  Thanks Stavros Korokithakis.
-
-* Added support for authentication with Subversion repositories. Thanks
-  Qiangning Hong.
-
-* Fixed issue #315 - ``--download`` now downloads dependencies as well.
-  Thanks Qiangning Hong.
-
-* Errors from subprocesses will display the current working directory.
-  Thanks Antti Kaihola.
-
-* Fixed issue #369 - compatibility with Subversion 1.7. Thanks Qiangning
-  Hong. Note that setuptools remains incompatible with Subversion 1.7; to
-  get the benefits of pip's support you must use Distribute rather than
-  setuptools.
-
-* Fixed issue #57 - ignore py2app-generated OS X mpkg zip files in finder.
-  Thanks Rene Dudfield.
-
-* Fixed issue #182 - log to ~/Library/Logs/ by default on OS X framework
-  installs. Thanks Dan Callahan for report and patch.
-
-* Fixed issue #310 - understand version tags without minor version ("py3")
-  in sdist filenames. Thanks Stuart Andrews for report and Olivier Girardot for
-  patch.
-
-* Fixed issue #7 - Pip now supports optionally installing setuptools
-  "extras" dependencies; e.g. "pip install Paste[openid]". Thanks Matt Maker
-  and Olivier Girardot.
-
-* Fixed issue #391 - freeze no longer borks on requirements files with
-  --index-url or --find-links. Thanks Herbert Pfennig.
-
-* Fixed issue #288 - handle symlinks properly. Thanks lebedov for the patch.
-
-* Fixed issue #49 - pip install -U no longer reinstalls the same versions of
-  packages. Thanks iguananaut for the pull request.
-
-* Removed ``-E`` option and ``PIP_RESPECT_VIRTUALENV``; both use a
-  restart-in-venv mechanism that's broken, and neither one is useful since
-  every virtualenv now has pip inside it.
-
-* Fixed issue #366 - pip throws IndexError when it calls `scraped_rel_links`
-
-* Fixed issue #22 - pip search should set and return a useful shell status code
-
-* Fixed issue #351 and #365 - added global ``--exists-action`` command line
-  option to make it easier to script around file-exists conflicts, e.g. from
-  editable VCS requirements whose repo URL has changed.
-
-
-1.0.2 (2011-07-16)
-------------------
-
-* Fixed docs issues.
-* Fixed issue #295 - Reinstall a package when using the ``install -I`` option
-* Fixed issue #283 - Finds a Git tag pointing to same commit as origin/master
-* Fixed issue #279 - Use absolute path for path to docs in setup.py
-* Fixed issue #314 - Correctly handle exceptions on Python3.
-* Fixed issue #320 - Correctly parse ``--editable`` lines in requirements files
-
-1.0.1 (2011-04-30)
-------------------
-
-* Start to use git-flow.
-* Fixed issue #274 - `find_command` should not raise AttributeError
-* Fixed issue #273 - respect Content-Disposition header. Thanks Bradley Ayers.
-* Fixed issue #233 - pathext handling on Windows.
-* Fixed issue #252 - svn+svn protocol.
-* Fixed issue #44 - multiple CLI searches.
-* Fixed issue #266 - current working directory when running setup.py clean.
-
-1.0 (2011-04-04)
-----------------
-
-* Added Python 3 support! Huge thanks to Vinay Sajip, Vitaly Babiy, Kelsey
-  Hightower, and Alex Gronholm, among others.
-
-* Download progress only shown on a real TTY. Thanks Alex Morega.
-
-* Fixed finding of VCS binaries to not be fooled by same-named directories.
-  Thanks Alex Morega.
-
-* Fixed uninstall of packages from system Python for users of Debian/Ubuntu
-  python-setuptools package (workaround until fixed in Debian and Ubuntu).
-
-* Added `get-pip.py <https://raw.github.com/pypa/pip/master/contrib/get-pip.py>`_
-  installer. Simply download and execute it, using the Python interpreter of
-  your choice::
-
-    $ curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py
-    $ python get-pip.py
-
-  This may have to be run as root.
-
-  .. note::
-
-      Make sure you have `distribute <http://pypi.python.org/pypi/distribute>`_
-      installed before using the installer!
-
-0.8.3
------
-
-* Moved main repository to Github: https://github.com/pypa/pip
-
-* Transferred primary maintenance from Ian to Jannis Leidel, Carl Meyer, Brian Rosner
-
-* Fixed issue #14 - No uninstall-on-upgrade with URL package. Thanks Oliver Tonnhofer
-
-* Fixed issue #163 - Egg name not properly resolved. Thanks Igor Sobreira
-
-* Fixed issue #178 - Non-alphabetical installation of requirements. Thanks Igor Sobreira
-
-* Fixed issue #199 - Documentation mentions --index instead of --index-url. Thanks Kelsey Hightower
-
-* Fixed issue #204 - rmtree undefined in mercurial.py. Thanks Kelsey Hightower
-
-* Fixed bug in Git vcs backend that would break during reinstallation.
-
-* Fixed bug in Mercurial vcs backend related to pip freeze and branch/tag resolution.
-
-* Fixed bug in version string parsing related to the suffix "-dev".
-
-0.8.2
------
-
-* Avoid redundant unpacking of bundles (from pwaller)
-
-* Fixed issue #32, #150, #161 - Fixed checking out the correct
-  tag/branch/commit when updating an editable Git requirement.
-
-* Fixed issue #49 - Added ability to install version control requirements
-  without making them editable, e.g.::
-
-    pip install git+https://github.com/pypa/pip/
-
-* Fixed issue #175 - Correctly locate build and source directory on Mac OS X.
-
-* Added ``git+https://`` scheme to Git VCS backend.
-
-0.8.1
------
-
-* Added global --user flag as shortcut for --install-option="--user". From
-  Ronny Pfannschmidt.
-
-* Added support for `PyPI mirrors <http://pypi.python.org/mirrors>`_ as
-  defined in `PEP 381 <http://www.python.org/dev/peps/pep-0381/>`_, from
-  Jannis Leidel.
-
-* Fixed issue #138 - Git revisions ignored. Thanks John-Scott Atlakson.
-
-* Fixed issue #95 - Initial editable install of github package from a tag fails. Thanks John-Scott Atlakson.
-
-* Fixed issue #107 - Can't install if a directory in cwd has the same name as the package you're installing.
-
-* Fixed issue #39 - --install-option="--prefix=~/.local" ignored with -e.
-  Thanks Ronny Pfannschmidt and Wil Tan.
-
-
-
-0.8
----
-
-* Track which ``build/`` directories pip creates, never remove directories
-  it doesn't create.  From Hugo Lopes Tavares.
-
-* Pip now accepts file:// index URLs. Thanks Dave Abrahams.
-
-* Various cleanup to make test-running more consistent and less fragile.
-  Thanks Dave Abrahams.
-
-* Real Windows support (with passing tests). Thanks Dave Abrahams.
-
-* ``pip-2.7`` etc. scripts are created (Python-version specific scripts)
-
-* ``contrib/build-standalone`` script creates a runnable ``.zip`` form of
-  pip, from Jannis Leidel
-
-* Editable git repos are updated when reinstalled
-
-* Fix problem with ``--editable`` when multiple ``.egg-info/`` directories
-  are found.
-
-* A number of VCS-related fixes for ``pip freeze``, from Hugo Lopes Tavares.
-
-* Significant test framework changes, from Hugo Lopes Tavares.
-
-0.7.2
------
-
-* Set zip_safe=False to avoid problems some people are encountering where
-  pip is installed as a zip file.
-
-0.7.1
------
-
-* Fixed opening of logfile with no directory name. Thanks Alexandre Conrad.
-
-* Temporary files are consistently cleaned up, especially after
-  installing bundles, also from Alex Conrad.
-
-* Tests now require at least ScriptTest 1.0.3.
-
-0.7
----
-
-* Fixed uninstallation on Windows
-* Added ``pip search`` command.
-* Tab-complete names of installed distributions for ``pip uninstall``.
-* Support tab-completion when there is a global-option before the
-  subcommand.
-* Install header files in standard (scheme-default) location when installing
-  outside a virtualenv. Install them to a slightly more consistent
-  non-standard location inside a virtualenv (since the standard location is
-  a non-writable symlink to the global location).
-* pip now logs to a central location by default (instead of creating
-  ``pip-log.txt`` all over the place) and constantly overwrites the
-  file in question. On Unix and Mac OS X this is ``'$HOME/.pip/pip.log'``
-  and on Windows it's ``'%HOME%\\pip\\pip.log'``. You are still able to
-  override this location with the ``$PIP_LOG_FILE`` environment variable.
-  For a complete (appended) logfile use the separate ``'--log'`` command line
-  option.
-* Fixed an issue with Git that left an editable package as a checkout of a
-  remote branch, even when the default behaviour would have been fine.
-* Fixed installing from a Git tag with older versions of Git.
-* Expand "~" in logfile and download cache paths.
-* Speed up installing from Mercurial repositories by cloning without
-  updating the working copy multiple times.
-* Fixed installing directly from directories (e.g.
-  ``pip install path/to/dir/``).
-* Fixed installing editable packages with ``svn+ssh`` URLs.
-* Don't print unwanted debug information when running the freeze command.
-* Create log file directory automatically. Thanks Alexandre Conrad.
-* Make test suite easier to run successfully. Thanks Dave Abrahams.
-* Fixed "pip install ." and "pip install .."; better error for directory
-  without setup.py. Thanks Alexandre Conrad.
-* Support Debian/Ubuntu "dist-packages" in zip command. Thanks duckx.
-* Fix relative --src folder. Thanks Simon Cross.
-* Handle missing VCS with an error message. Thanks Alexandre Conrad.
-* Added --no-download option to install; pairs with --no-install to separate
-  download and installation into two steps. Thanks Simon Cross.
-* Fix uninstalling from requirements file containing -f, -i, or
-  --extra-index-url.
-* Leftover build directories are now removed. Thanks Alexandre Conrad.
-
-0.6.3
------
-
-* Fixed import error on Windows with regard to the backwards compatibility
-  package
-
-0.6.2
------
-
-* Fixed uninstall when /tmp is on a different filesystem.
-
-* Fixed uninstallation of distributions with namespace packages.
-
-0.6.1
------
-
-* Added support for the ``https`` and ``http-static`` schemes to the
-  Mercurial and ``ftp`` scheme to the Bazaar backend.
-
-* Fixed uninstallation of scripts installed with easy_install.
-
-* Fixed an issue in the package finder that could result in an
-  infinite loop while looking for links.
-
-* Fixed issue with ``pip bundle`` and local files (which weren't being
-  copied into the bundle), from Whit Morriss.
-
-0.6
----
-
-* Add ``pip uninstall`` and uninstall-before upgrade (from Carl
-  Meyer).
-
-* Extended configurability with config files and environment variables.
-
-* Allow packages to be upgraded, e.g., ``pip install Package==0.1``
-  then ``pip install Package==0.2``.
-
-* Allow installing/upgrading to Package==dev (fix "Source version does not
-  match target version" errors).
-
-* Added command and option completion for bash and zsh.
-
-* Extended integration with virtualenv by providing an option to
-  automatically use an active virtualenv and an option to warn if no active
-  virtualenv is found.
-
-* Fixed a bug with pip install --download and editable packages, where
-  directories were being set with 0000 permissions, now defaults to 755.
-
-* Fixed uninstallation of easy_installed console_scripts.
-
-* Fixed uninstallation on Mac OS X Framework layout installs
-
-* Fixed bug preventing uninstall of editables with source outside venv.
-
-* Creates download cache directory if not existing.
-
-0.5.1
------
-
-* Fixed a couple little bugs, with git and with extensions.
-
-0.5
----
-
-* Added ability to override the default log file name (``pip-log.txt``)
-  with the environment variable ``$PIP_LOG_FILE``.
-
-* Made the freeze command print installed packages to stdout instead of
-  writing them to a file. Use simple redirection (e.g.
-  ``pip freeze > stable-req.txt``) to get a file with requirements.
-
-* Fixed problem with freezing editable packages from a Git repository.
-
-* Added support for base URLs using ``<base href='...'>`` when parsing
-  HTML pages.
-
-* Fixed installing of non-editable packages from version control systems.
-
-* Fixed issue with Bazaar's bzr+ssh scheme.
-
-* Added --download-dir option to the install command to retrieve package
-  archives. If given an editable package it will create an archive of it.
-
-* Added ability to pass local file and directory paths to ``--find-links``,
-  e.g. ``--find-links=file:///path/to/my/private/archive``
-
-* Reduced the amount of console log messages when fetching a page to find a
-  distribution was problematic. The full messages can be found in pip-log.txt.
-
-* Added ``--no-deps`` option to make install ignore package dependencies
-
-* Added ``--no-index`` option to ignore the package index (PyPI) temporarily
-
-* Fixed installing editable packages from Git branches.
-
-* Fixed freezing of editable packages from Mercurial repositories.
-
-* Fixed handling read-only attributes of build files, e.g. of Subversion and
-  Bazaar on Windows.
-
-* When downloading a file from a redirect, use the redirected
-  location's extension to guess the compression (happens specifically
-  when redirecting to a bitbucket.org tip.gz file).
-
-* Editable freeze URLs now always use revision hash/id rather than tip or
-  branch names which could move.
-
-* Fixed comparison of repo URLs so incidental differences such as
-  presence/absence of final slashes or quoted/unquoted special
-  characters don't trigger "ignore/switch/wipe/backup" choice.
-
-* Fixed handling of attempt to checkout editable install to a
-  non-empty, non-repo directory.
-
-0.4
----
-
-* Make ``-e`` work better with local hg repositories
-
-* Construct PyPI URLs the exact way easy_install constructs URLs (you
-  might notice this if you use a custom index that is
-  slash-sensitive).
-
-* Improvements on Windows (from `Ionel Maries Cristian
-  <http://ionelmc.wordpress.com/>`_).
-
-* Fixed problem with not being able to install private git repositories.
-
-* Make ``pip zip`` zip all its arguments, not just the first.
-
-* Fix some filename issues on Windows.
-
-* Allow the ``-i`` and ``--extra-index-url`` options in requirements
-  files.
-
-* Fix the way bundle components are unpacked and moved around, to make
-  bundles work.
-
-* Adds ``-s`` option to allow access to the global site-packages if a
-  virtualenv is to be created.
-
-* Fixed support for Subversion 1.6.
-
-0.3.1
------
-
-* Improved virtualenv restart and various path/cleanup problems on win32.
-
-* Fixed a regression with installing from svn repositories (when not
-  using ``-e``).
-
-* Fixes when installing editable packages that put their source in a
-  subdirectory (like ``src/``).
-
-* Improve ``pip -h``
-
-0.3
----
-
-* Added support for editable packages created from Git, Mercurial and Bazaar
-  repositories and ability to freeze them. Refactored support for version
-  control systems.
-
-* Do not use ``sys.exit()`` from inside the code; instead use a
-  return.  This will make it easier to invoke programmatically.
-
-* Put the install record in ``Package.egg-info/installed-files.txt``
-  (previously they went in
-  ``site-packages/install-record-Package.txt``).
-
-* Fix a problem with ``pip freeze`` not including ``-e svn+`` when an
-  svn structure is peculiar.
-
-* Allow ``pip -E`` to work with a virtualenv that uses a different
-  version of Python than the parent environment.
-
-* Fixed Win32 virtualenv (``-E``) option.
-
-* Search the links passed in with ``-f`` for packages.
-
-* Detect zip files, even when the file doesn't have a ``.zip``
-  extension and it is served with the wrong Content-Type.
-
-* Installing editable from existing source now works, like ``pip
-  install -e some/path/`` will install the package in ``some/path/``.
-  Most importantly, anything that package requires will also be
-  installed by pip.
-
-* Add a ``--path`` option to ``pip un/zip``, so you can avoid zipping
-  files that are outside of where you expect.
-
-* Add ``--simulate`` option to ``pip zip``.
-
-0.2.1
------
-
-* Fixed small problem that prevented using ``pip.py`` without actually
-  installing pip.
-
-* Fixed ``--upgrade``, which would download and appear to install
-  upgraded packages, but actually just reinstall the existing package.
-
-* Fixed Windows problem with putting the install record in the right
-  place, and generating the ``pip`` script with Setuptools.
-
-* Download links that include embedded spaces or other unsafe characters are
-  now handled (those characters get %-encoded).
-
-* Fixed use of URLs in requirement files, and problems with some blank
-  lines.
-
-* Turn some tar file errors into warnings.
-
-0.2
----
-
-* Renamed to ``pip``, and to install you now do ``pip install
-  PACKAGE``
-
-* Added command ``pip zip PACKAGE`` and ``pip unzip PACKAGE``.  This
-  is particularly intended for Google App Engine to manage libraries
-  to stay under the 1000-file limit.
-
-* Some fixes to bundles, especially editable packages and when
-  creating a bundle using unnamed packages (like just an svn
-  repository without ``#egg=Package``).
-
-0.1.4
------
-
-* Added an option ``--install-option`` to pass arguments to
-  ``setup.py install``
-
-* ``.svn/`` directories are no longer included in bundles, as these
-  directories are specific to a version of svn -- if you build a
-  bundle on a system with svn 1.5, you can't use the checkout on a
-  system with svn 1.4.  Instead a file ``svn-checkout.txt`` is
-  included that notes the original location and revision, and the
-  command you can use to turn it back into an svn checkout.  (Probably
-  unpacking the bundle should, maybe optionally, recreate this
-  information -- but that is not currently implemented, and it would
-  require network access.)
-
-* Avoid ambiguities over project name case, where for instance
-  MyPackage and mypackage would be considered different packages.
-  This in particular caused problems on Macs, where ``MyPackage/`` and
-  ``mypackage/`` are the same directory.
-
-* Added support for an environment variable
-  ``$PIP_DOWNLOAD_CACHE`` which will cache package downloads, so
-  future installations won't require large downloads.  Network access
-  is still required, but some downloads will be avoided when using
-  using this.
-
-0.1.3
------
-
-* Always use ``svn checkout`` (not ``export``) so that
-  ``tag_svn_revision`` settings give the revision of the package.
-
-* Don't update checkouts that came from ``.pybundle`` files.
-
-0.1.2
------
-
-* Improve error text when there are errors fetching HTML pages when
-  seeking packages.
-
-* Improve bundles: include empty directories, make them work with
-  editable packages.
-
-* If you use ``-E env`` and the environment ``env/`` doesn't exist, a
-  new virtual environment will be created.
-
-* Fix ``dependency_links`` for finding packages.
-
-0.1.1
------
-
-* Fixed a NameError exception when running pip outside of a
-  virtualenv environment.
-
-* Added HTTP proxy support (from Prabhu Ramachandran)
-
-* Fixed use of ``hashlib.md5`` on python2.5+ (also from Prabhu
-  Ramachandran)
-
-0.1
----
-
-* Initial release
diff --git a/vendor/pip-1.2.1/docs/other-tools.txt b/vendor/pip-1.2.1/docs/other-tools.txt
deleted file mode 100644
index a705e8ab27b2cf8fc1d9db7baf383cf32f755ae7..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/other-tools.txt
+++ /dev/null
@@ -1,131 +0,0 @@
-=============================
-Relationship with other tools
-=============================
-
-Pip Compared To easy_install
-----------------------------
-
-pip is meant to improve on easy_install.  Some of the improvements:
-
-* All packages are downloaded before installation.  Partially-completed
-  installation doesn't occur as a result.
-
-* Care is taken to present useful output on the console.
-
-* The reasons for actions are kept track of.  For instance, if a package is
-  being installed, pip keeps track of why that package was required.
-
-* Error messages should be useful.
-
-* The code is relatively concise and cohesive, making it easier to use
-  programmatically.
-
-* Packages don't have to be installed as egg archives; they can be installed
-  flat (while keeping the egg metadata).
-
-* Native support for other version control systems (Git, Mercurial and Bazaar)
-
-* Uninstallation of packages.
-
-* Simple to define fixed sets of requirements and reliably reproduce a
-  set of packages.
-
-pip doesn't do everything that easy_install does. Specifically:
-
-* It cannot install from eggs.  It only installs from source.  (In the
-  future it would be good if it could install binaries from Windows ``.exe``
-  or ``.msi`` -- binary install on other platforms is not a priority.)
-
-* It doesn't understand Setuptools extras (like ``package[test]``).  This should
-  be added eventually.
-
-* It is incompatible with some packages that extensively customize distutils
-  or setuptools in their ``setup.py`` files.
-
-pip is complementary with `virtualenv
-<http://pypi.python.org/pypi/virtualenv>`__, and it is encouraged that you use
-virtualenv to isolate your installation.
-
-Using pip with virtualenv
--------------------------
-
-pip is most nutritious when used with `virtualenv
-<http://pypi.python.org/pypi/virtualenv>`__.  One of the reasons pip
-doesn't install "multi-version" eggs is that virtualenv removes much of the need
-for it.  Because pip is installed by virtualenv, just use
-``path/to/my/environment/bin/pip`` to install things into that
-specific environment.
-
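-For example, to install into one specific environment without activating it
-(a sketch; the environment path is illustrative)::
-
-    $ path/to/my/environment/bin/pip install SomePackage
-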
-To tell pip to only run if there is a virtualenv currently activated,
-and to bail if not, use::
-
-    export PIP_REQUIRE_VIRTUALENV=true
-
-
-Using pip with virtualenvwrapper
----------------------------------
-
-If you are using `virtualenvwrapper
-<http://www.doughellmann.com/projects/virtualenvwrapper/>`_, you might
-want pip to automatically create its virtualenvs in your
-``$WORKON_HOME``.
-
-You can tell pip to do so by defining ``PIP_VIRTUALENV_BASE`` in your
-environment and setting it to the same value as that of
-``$WORKON_HOME``.
-
-Do so by adding the line::
-
-    export PIP_VIRTUALENV_BASE=$WORKON_HOME
-
-in your .bashrc under the line starting with ``export WORKON_HOME``.
-
-Using pip with buildout
------------------------
-
-If you are using `zc.buildout
-<http://pypi.python.org/pypi/zc.buildout>`_ you should look at
-`gp.recipe.pip <http://pypi.python.org/pypi/gp.recipe.pip>`_ as an
-option to use pip and virtualenv in your buildouts.
-
-Using pip with the "user scheme"
---------------------------------
-
-With Python 2.6 came the `"user scheme" for installation
-<http://docs.python.org/install/index.html#alternate-installation-the-user-scheme>`_, which means that all
-Python distributions support an alternative install location that is specific to a user.
-The default location for each OS is explained in the python documentation
-for the `site.USER_BASE <http://docs.python.org/library/site.html#site.USER_BASE>`_ variable.
-This mode of installation can be turned on by
-specifying the ``--user`` option to ``pip install``.
-
-Moreover, the "user scheme" can be customized by setting the
-``PYTHONUSERBASE`` environment variable, which updates the value of ``site.USER_BASE``.
-
-To install "somepackage" into an environment with site.USER_BASE customized to '/myappenv', do the following::
-
-    export PYTHONUSERBASE=/myappenv
-    pip install --user somepackage
-
-
-Command line completion
------------------------
-
-pip comes with support for command line completion in bash and zsh and
-allows you to tab-complete commands and options. To enable it you simply
-need to copy the required shell script to your shell startup file
-(e.g. ``.profile`` or ``.zprofile``) by running the special ``completion``
-command, e.g. for bash::
-
-    $ pip completion --bash >> ~/.profile
-
-And for zsh::
-
-    $ pip completion --zsh >> ~/.zprofile
-
-Alternatively, you can use the result of the ``completion`` command
-directly with the eval function of your shell, e.g. by adding::
-
-    eval "`pip completion --bash`"
-
-to your startup file.
diff --git a/vendor/pip-1.2.1/docs/requirements.txt b/vendor/pip-1.2.1/docs/requirements.txt
deleted file mode 100644
index 326937da6ed5405fe42bbb861a93576780a76c3d..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/requirements.txt
+++ /dev/null
@@ -1,246 +0,0 @@
-.. _`requirements-files`:
-
-==================
-Requirements files
-==================
-
-When installing software, and Python packages in particular, it's common that
-you get a lot of libraries installed.  You just did ``easy_install MyPackage``
-and you get a dozen packages.  Each of these packages has its own version.
-
-Maybe you ran that installation and it works.  Great!  Will it keep working?
-Did you have to provide special options to get it to find everything?  Did you
-have to install a bunch of other optional pieces?  Most of all, will you be able
-to do it again?  Requirements files give you a way to create an *environment*:
-a *set* of packages that work together.
-
-If you've ever tried to set up an application on a new system, or with slightly
-updated pieces, and had it fail, pip requirements are for you.  If you
-haven't had this problem then you will eventually, so pip requirements are
-for you too -- requirements make explicit, repeatable installation of packages.
-
-So what are requirements files?  They are very simple: lists of packages to
-install.  Instead of running something like ``pip install MyApp`` and
-getting whatever libraries come along, you can create a requirements file
-something like::
-
-    MyApp
-    Framework==0.9.4
-    Library>=0.2
-
-If you save this in ``requirements.txt``, then you can ``pip install -r
-requirements.txt``.  Regardless of what MyApp lists in ``setup.py``, you'll
-get a specific version of Framework (0.9.4) and at least the 0.2 version of
-Library.  (You might think you could list these specific versions in MyApp's
-``setup.py`` -- but if you do that you'll have to edit MyApp if you want to
-try a new version of Framework, or release a new version of MyApp if you
-determine that Library 0.3 doesn't work with your application.) You can also
-add optional libraries and support tools that MyApp doesn't strictly
-require, giving people a set of recommended libraries.
-
-You can also include "editable" packages -- packages that are checked out from
-Subversion, Git, Mercurial and Bazaar.  These are just like using the ``-e``
-option to pip.  They look like::
-
-    -e svn+http://myrepo/svn/MyApp#egg=MyApp
-
-You have to start the URL with ``svn+`` (``git+``, ``hg+`` or ``bzr+``), and
-you have to include ``#egg=Package`` so pip knows what to expect at that URL.
-You can also include ``@rev`` in the URL, e.g., ``@275`` to check out
-revision 275.
-
-Requirement files are mostly *flat*.  Maybe ``MyApp`` requires
-``Framework``, and ``Framework`` requires ``Library``.  I encourage
-you to still list all these in a single requirement file; it is the
-nature of Python programs that there are implicit bindings *directly*
-between MyApp and Library.  For instance, Framework might expose one
-of Library's objects, and so if Library is updated it might directly
-break MyApp.  If that happens you can update the requirements file to
-force an earlier version of Library, and you can do that without
-having to re-release MyApp at all.
-
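-For example, if Library 0.3 turned out to break MyApp, the flat requirements
-file could simply pin it back (a sketch; the version bound is illustrative)::
-
-    MyApp
-    Framework==0.9.4
-    Library>=0.2,<0.3
-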
-Read the `requirements file format`_ to learn about other features.
-
-Freezing Requirements
-=====================
-
-So you have a working set of packages, and you want to be able to install them
-elsewhere.  `Requirements files`_ let you install exact versions, but they
-won't tell you what all the exact versions are.
-
-To create a new requirements file from a known working environment, use::
-
-    $ pip freeze > stable-req.txt
-
-This will write a listing of *all* installed libraries to ``stable-req.txt``
-with exact versions for every library.  You may want to edit the file down after
-generating (e.g., to eliminate unnecessary libraries), but it'll give you a
-stable starting point for constructing your requirements file.
-
-You can also give it an existing requirements file, and it will use that as a
-sort of template for the new file.  So if you do::
-
-    $ pip freeze -r devel-req.txt > stable-req.txt
-
-it will keep the packages listed in ``devel-req.txt`` in order and preserve
-comments.
-
-The _`requirements file format`
-===============================
-
-The requirements file is a way to get pip to install specific packages
-to make up an *environment*.  This document describes that format.  To
-read about *when* you should use requirement files, see `Requirements
-Files <./#requirements-files>`_.
-
-Each line of the requirements file indicates something to be
-installed.  For example::
-
-    MyPackage==3.0
-
-tells pip to install the 3.0 version of MyPackage.
-
-You can also request `extras`_ in the requirements file::
-
-    MyPackage==3.0 [PDF]
-
-.. _extras: http://peak.telecommunity.com/DevCenter/setuptools#declaring-extras-optional-features-with-their-own-dependencies
-
-Packages may also be installed in an "editable" form.  This puts the
-source code into ``src/distname`` (making the name lower case) and
-runs ``python setup.py develop`` on the package.  To indicate
-editable, use ``-e``, like::
-
-    -e svn+http://svn.myproject.org/svn/MyProject/trunk#egg=MyProject
-
-The ``#egg=MyProject`` part is important, because while you can
-install simply given the svn location, the project name is useful in
-other places.
-
-You can also specify the egg name for a non-editable URL. This is useful to
-point to HEAD locations on the local filesystem::
-
-    file:///path/to/your/lib/project#egg=MyProject
-
-or relative paths::
-
-    file:../../lib/project#egg=MyProject
-
-If you need to give pip (and by association easy_install) hints
-about where to find a package, you can use the ``-f``
-(``--find-links``) option, like::
-
-    $ pip install -f http://someserver.org/index-of-packages MyPackage==3.0
-
-Pip will then look for a link at http://someserver.org/index-of-packages
-that matches version ``3.0`` of ``MyPackage`` -- the link should be
-like ``MyPackage-3.0.tar.gz``.
-
-And if you want to install from a tarball or zip file with a direct link,
-you don't need the ``-f`` option; just pass the absolute URL, like::
-
-    $ pip install http://someserver.org/packages/MyPackage-3.0.tar.gz
-
-
-Version Control
----------------
-
-Right now pip knows of the following major version control systems:
-
-Subversion
-~~~~~~~~~~
-
-Pip supports the URL schemes ``svn``, ``svn+svn``, ``svn+http``, ``svn+https``, ``svn+ssh``.
-You can also give specific revisions to an SVN URL, like::
-
-    -e svn+svn://svn.myproject.org/svn/MyProject#egg=MyProject
-    -e svn+http://svn.myproject.org/svn/MyProject/trunk@2019#egg=MyProject
-
-which will check out revision 2019.  ``@{20080101}`` would also check
-out the revision from 2008-01-01. You can only check out specific
-revisions using ``-e svn+...``.
-
-Git
-~~~
-
-Pip currently supports cloning over ``git``, ``git+http`` and ``git+ssh``::
-
-    -e git://git.myproject.org/MyProject.git#egg=MyProject
-    -e git+http://git.myproject.org/MyProject/#egg=MyProject
-    -e git+ssh://git@myproject.org/MyProject/#egg=MyProject
-
-Passing branch names, a commit hash or a tag name is also possible::
-
-    -e git://git.myproject.org/MyProject.git@master#egg=MyProject
-    -e git://git.myproject.org/MyProject.git@v1.0#egg=MyProject
-    -e git://git.myproject.org/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject
-
-Mercurial
-~~~~~~~~~
-
-The supported schemes are: ``hg+http``, ``hg+https``,
-``hg+static-http`` and ``hg+ssh``::
-
-    -e hg+http://hg.myproject.org/MyProject/#egg=MyProject
-    -e hg+https://hg.myproject.org/MyProject/#egg=MyProject
-    -e hg+ssh://hg@myproject.org/MyProject/#egg=MyProject
-
-You can also specify a revision number, a revision hash, a tag name or a local
-branch name::
-
-    -e hg+http://hg.myproject.org/MyProject/@da39a3ee5e6b#egg=MyProject
-    -e hg+http://hg.myproject.org/MyProject/@2019#egg=MyProject
-    -e hg+http://hg.myproject.org/MyProject/@v1.0#egg=MyProject
-    -e hg+http://hg.myproject.org/MyProject/@special_feature#egg=MyProject
-
-Bazaar
-~~~~~~
-
-Pip supports Bazaar using the ``bzr+http``, ``bzr+https``, ``bzr+ssh``,
-``bzr+sftp``, ``bzr+ftp`` and ``bzr+lp`` schemes::
-
-    -e bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject
-    -e bzr+sftp://user@myproject.org/MyProject/trunk/#egg=MyProject
-    -e bzr+ssh://user@myproject.org/MyProject/trunk/#egg=MyProject
-    -e bzr+ftp://user@myproject.org/MyProject/trunk/#egg=MyProject
-    -e bzr+lp:MyProject#egg=MyProject
-
-Tags or revisions can be installed like this::
-
-    -e bzr+https://bzr.myproject.org/MyProject/trunk/@2019#egg=MyProject
-    -e bzr+http://bzr.myproject.org/MyProject/trunk/@v1.0#egg=MyProject
-
-Recursive Requirements
-----------------------
-
-If you wish, you can also refer to other requirements files, like::
-
-    -r Pylons-requirements.txt
-
-This gives you a way of abstracting out sets of requirements.  This
-isn't, however, very friendly with `frozen requirements
-<./#freezing-requirements>`_, as everything in
-``Pylons-requirements.txt`` will show up in your frozen file.
-
-Indexes, find-links
--------------------
-
-You can also provide values for the ``--index-url`` and ``--find-links``
-options in your requirement files, like::
-
-    --index-url http://example.com/private-pypi/
-
-Note that using ``--index-url`` removes the use of `PyPI
-<http://pypi.python.org>`_, while using ``--extra-index-url`` will add
-additional indexes.
-
-``--find-links`` is more ad-hoc; instead of a complete "index", you
-only need an HTML page of links to available packages.  Simply by
-putting all your private packages in a directory and using the Apache
-auto-index, you can publish your packages so pip can find them.
-``--find-links`` is always additive; pip looks at everything it can
-find.  Use it like::
-
-    --find-links http://example.com/private-packages/
-
-Note that all these options must be on a line of their own.
diff --git a/vendor/pip-1.2.1/docs/usage.txt b/vendor/pip-1.2.1/docs/usage.txt
deleted file mode 100644
index 991d0d54e289ef1f2ca83f91c0907d5b09746c26..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/docs/usage.txt
+++ /dev/null
@@ -1,160 +0,0 @@
-=====
-Usage
-=====
-
-Install packages
-----------------
-
-The simplest way to install a package is by specifying its name::
-
-    $ pip install SomePackage
-
-`SomePackage` is downloaded from :term:`PyPI`, along with its
-dependencies, and installed.
-
-If `SomePackage` is already installed, and you need a newer version, use ``pip
-install --upgrade SomePackage``. You can also request a specific version (``pip
-install SomePackage==1.0.4``) and specify `setuptools extras`_ (``pip install
-SomePackage[PDF]``).
-
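-For example, the forms just described look like this on the command line::
-
-    $ pip install --upgrade SomePackage
-    $ pip install SomePackage==1.0.4
-    $ pip install SomePackage[PDF]
-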
-You can also install from a particular source distribution file, either
-local or remote::
-
-    $ pip install ./downloads/SomePackage-1.0.4.tar.gz
-    $ pip install http://my.package.repo/SomePackage-1.0.4.zip
-
-.. _setuptools extras: http://peak.telecommunity.com/DevCenter/setuptools#declaring-extras-optional-features-with-their-own-dependencies
-
-
-Edit mode
-*********
-
-Packages normally_ install under ``site-packages``, but when you're
-making changes, it makes more sense to run the package straight from the
-checked-out source tree. "Editable" installs create a ``.pth`` file in
-``site-packages`` that extends Python's import path to find the
-package::
-
-    $ pip install -e path/to/SomePackage
-
-.. _normally: http://docs.python.org/install/index.html#how-installation-works
-
-
-Version control systems
-***********************
-
-Pip knows how to check out a package from version control. `Subversion`,
-`Git`, `Mercurial` and `Bazaar` are supported. The repository will be
-checked out in a temporary folder, installed, and cleaned up::
-
-    $ pip install git+https://github.com/simplejson/simplejson.git
-    $ pip install svn+svn://svn.zope.org/repos/main/zope.interface/trunk/
-
-This can be combined with the `-e` flag, and Pip will perform the
-checkout in ``./src/``. You need to supply a name for the checkout
-folder by appending an ``#egg=name`` fragment to the repository URL::
-
-    $ pip install -e git+https://github.com/lakshmivyas/hyde.git#egg=hyde
-
-Note that only basic checking-out of a repo is supported; pip will not
-handle advanced VCS-specific features such as submodules or subrepos.
-
-
-Alternate package repositories
-******************************
-
-pip searches in :term:`PyPI` by default, but this can be overridden using the
-``--index-url`` option::
-
-    $ pip install --index-url http://d.pypi.python.org/simple/ SomePackage
-
-If you have your own package index with a few additional packages, you may want
-to specify additional index URLs while still using :term:`PyPI`::
-
-    $ pip install --extra-index-url http://my.package.repo/ SomePackage
-
-A "package index" used with ``--index-url`` or ``--extra-index-url`` can be as
-simple as a static-web-served directory, with automatic indexes on, with a
-subdirectory per package and sdists (tarballs created with ``python setup.py
-sdist``) in that directory::
-
-    mypackage/
-        mypackage-0.7.8.tar.gz
-        mypackage-1.0.1.tar.gz
-    otherpackage/
-        otherpackage-2.3.5.tar.gz
-
-If the number of packages in the index is small, it's even simpler to skip the
-subdirectories: put all of the sdists in a single directory and use pip's
-``--find-links`` option with a URL to that directory::
-
-    mypackage-0.7.8.tar.gz
-    mypackage-1.0.1.tar.gz
-    otherpackage-2.3.5.tar.gz
-
-``--find-links`` also supports local paths, so installation need not require a
-network connection.
-
-Like ``--extra-index-url``, ``--find-links`` is additive by default; it does
-not replace or supersede the index. All package sources are checked, and the
-latest qualifying version for every requested package is used. If you want only
-your ``--find-links`` URL used as a package source, you need to pair it with
-``--no-index``.
-
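-For example, to install exclusively from a private directory of packages
-(a sketch; the URL is the illustrative one used above)::
-
-    $ pip install --no-index --find-links=http://example.com/private-packages/ SomePackage
-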
-``--index-url``, ``--extra-index-url`` and ``--find-links`` can all be used
-within a :ref:`requirements file <requirements-files>` in addition to on the
-command line directly.
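-
-As a sketch, a requirements file combining these options might look like this
-(URLs and names are placeholders)::
-
-    --index-url http://my.package.repo/simple/
-    --find-links ./local-sdists/
-    SomePackage==1.0.4
-    OtherPackage>=2.3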
-
-
-Uninstall packages
-------------------
-
-pip is able to uninstall most installed packages with ``pip uninstall
-package-name``.
-
-Known exceptions include pure-distutils packages installed with
-``python setup.py install`` (such packages leave behind no metadata recording
-which files were installed), and script wrappers installed
-by develop-installs (``python setup.py develop``).
-
-pip also performs an automatic uninstall of an old version of a package
-before upgrading to a newer version, so outdated files (and egg-info data)
-from conflicting versions aren't left hanging around to cause trouble. The
-old version of the package is automatically restored if the new version
-fails to download or install.
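-
-pip asks for confirmation before removing files; pass ``-y``/``--yes`` to skip
-the prompt::
-
-    $ pip uninstall -y SomePackage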
-
-
-Searching for packages
-----------------------
-
-pip can search :term:`PyPI` for packages using the ``pip search``
-command::
-
-    $ pip search "query"
-
-The query will be used to search the names and summaries of all
-packages. With the ``--index`` option you can search in a different
-repository.
-
-
-Bundles
--------
-
-Another way to distribute a set of libraries is a bundle format (specific to
-pip).  This format is not stable at this time (there simply hasn't been
-any feedback, nor a great deal of thought).  A bundle file contains all the
-source for your packages, and pip can install them all together.
-Once you have the bundle file, further network access won't be necessary.  To
-build a bundle file, do::
-
-    $ pip bundle MyApp.pybundle MyApp
-
-(Using a :ref:`requirements file <requirements-files>` would be wise.)  Then
-someone else can get the file ``MyApp.pybundle`` and run::
-
-    $ pip install MyApp.pybundle
-
-This is *not* a binary format; it only packages source.  If the bundled
-packages contain compiled code, the person installing them will need a
-compiler, any necessary headers, etc.  Binary packages are hard; this is
-relatively easy.
diff --git a/vendor/pip-1.2.1/pip/__init__.py b/vendor/pip-1.2.1/pip/__init__.py
deleted file mode 100755
index 9580790a247d55036f0f5da926ae0176d2c2879a..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/__init__.py
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/usr/bin/env python
-import os
-import optparse
-
-import subprocess
-import sys
-import re
-import difflib
-
-from pip.backwardcompat import walk_packages, console_to_str
-from pip.basecommand import command_dict, load_command, load_all_commands, command_names
-from pip.baseparser import parser
-from pip.exceptions import InstallationError
-from pip.log import logger
-from pip.util import get_installed_distributions
-
-
-def autocomplete():
-    """Command and option completion for the main option parser (and options)
-    and its subcommands (and options).
-
-    Enable by sourcing one of the completion shell scripts (bash or zsh).
-    """
-    # Don't complete if user hasn't sourced bash_completion file.
-    if 'PIP_AUTO_COMPLETE' not in os.environ:
-        return
-    cwords = os.environ['COMP_WORDS'].split()[1:]
-    cword = int(os.environ['COMP_CWORD'])
-    try:
-        current = cwords[cword-1]
-    except IndexError:
-        current = ''
-    load_all_commands()
-    subcommands = [cmd for cmd, cls in command_dict.items() if not cls.hidden]
-    options = []
-    # subcommand
-    try:
-        subcommand_name = [w for w in cwords if w in subcommands][0]
-    except IndexError:
-        subcommand_name = None
-    # subcommand options
-    if subcommand_name:
-        # special case: 'help' subcommand has no options
-        if subcommand_name == 'help':
-            sys.exit(1)
-        # special case: list locally installed dists for uninstall command
-        if subcommand_name == 'uninstall' and not current.startswith('-'):
-            installed = []
-            lc = current.lower()
-            for dist in get_installed_distributions(local_only=True):
-                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
-                    installed.append(dist.key)
-            # if there are no dists installed, fall back to option completion
-            if installed:
-                for dist in installed:
-                    print(dist)
-                sys.exit(1)
-        subcommand = command_dict.get(subcommand_name)
-        options += [(opt.get_opt_string(), opt.nargs)
-                    for opt in subcommand.parser.option_list
-                    if opt.help != optparse.SUPPRESS_HELP]
-        # filter out previously specified options from available options
-        prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
-        options = [(x, v) for (x, v) in options if x not in prev_opts]
-        # filter options by current input
-        options = [(k, v) for k, v in options if k.startswith(current)]
-        for option in options:
-            opt_label = option[0]
-            # append '=' to options which require args
-            if option[1]:
-                opt_label += '='
-            print(opt_label)
-    else:
-        # show options of main parser only when necessary
-        if current.startswith('-') or current.startswith('--'):
-            subcommands += [opt.get_opt_string()
-                            for opt in parser.option_list
-                            if opt.help != optparse.SUPPRESS_HELP]
-        print(' '.join([x for x in subcommands if x.startswith(current)]))
-    sys.exit(1)
-
-
-def version_control():
-    # Import all the version control support modules:
-    from pip import vcs
-    for importer, modname, ispkg in \
-            walk_packages(path=vcs.__path__, prefix=vcs.__name__+'.'):
-        __import__(modname)
-
-
-def main(initial_args=None):
-    if initial_args is None:
-        initial_args = sys.argv[1:]
-    autocomplete()
-    version_control()
-    options, args = parser.parse_args(initial_args)
-    if options.help and not args:
-        args = ['help']
-    if not args:
-        parser.error('You must give a command (use "pip help" to see a list of commands)')
-    command = args[0].lower()
-    load_command(command)
-    if command not in command_dict:
-        close_commands = difflib.get_close_matches(command, command_names())
-        if close_commands:
-            guess = close_commands[0]
-            if args[1:]:
-                guess = "%s %s" % (guess, " ".join(args[1:]))
-        else:
-            guess = 'install %s' % command
-        error_dict = {'arg': command, 'guess': guess,
-                      'script': os.path.basename(sys.argv[0])}
-        parser.error('No command by the name %(script)s %(arg)s\n  '
-                     '(maybe you meant "%(script)s %(guess)s")' % error_dict)
-    command = command_dict[command]
-    return command.main(args[1:], options)
-
-
-def bootstrap():
-    """
-    Bootstrapping function to be called from install-pip.py script.
-    """
-    return main(['install', '--upgrade', 'pip'])
-
-############################################################
-## Writing freeze files
-
-
-class FrozenRequirement(object):
-
-    def __init__(self, name, req, editable, comments=()):
-        self.name = name
-        self.req = req
-        self.editable = editable
-        self.comments = comments
-
-    _rev_re = re.compile(r'-r(\d+)$')
-    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
-
-    @classmethod
-    def from_dist(cls, dist, dependency_links, find_tags=False):
-        location = os.path.normcase(os.path.abspath(dist.location))
-        comments = []
-        from pip.vcs import vcs, get_src_requirement
-        if vcs.get_backend_name(location):
-            editable = True
-            req = get_src_requirement(dist, location, find_tags)
-            if req is None:
-                logger.warn('Could not determine repository location of %s' % location)
-                comments.append('## !! Could not determine repository location')
-                req = dist.as_requirement()
-                editable = False
-        else:
-            editable = False
-            req = dist.as_requirement()
-            specs = req.specs
-            assert len(specs) == 1 and specs[0][0] == '=='
-            version = specs[0][1]
-            ver_match = cls._rev_re.search(version)
-            date_match = cls._date_re.search(version)
-            if ver_match or date_match:
-                svn_backend = vcs.get_backend('svn')
-                if svn_backend:
-                    svn_location = svn_backend(
-                        ).get_location(dist, dependency_links)
-                if not svn_location:
-                    logger.warn(
-                        'Warning: cannot find svn location for %s' % req)
-                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
-                else:
-                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
-                    if ver_match:
-                        rev = ver_match.group(1)
-                    else:
-                        rev = '{%s}' % date_match.group(1)
-                    editable = True
-                    req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
-        return cls(dist.project_name, req, editable, comments)
-
-    @staticmethod
-    def egg_name(dist):
-        name = dist.egg_name()
-        match = re.search(r'-py\d\.\d$', name)
-        if match:
-            name = name[:match.start()]
-        return name
-
-    def __str__(self):
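-        # Render one requirements-file line: "-e <req>" for editable checkouts,
-        # otherwise the plain requirement (e.g. "Name==1.0"), preceded by any
-        # accumulated comments.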
-        req = self.req
-        if self.editable:
-            req = '-e %s' % req
-        return '\n'.join(list(self.comments)+[str(req)])+'\n'
-
-############################################################
-## Requirement files
-
-
-def call_subprocess(cmd, show_stdout=True,
-                    filter_stdout=None, cwd=None,
-                    raise_on_returncode=True,
-                    command_level=logger.DEBUG, command_desc=None,
-                    extra_environ=None):
-    if command_desc is None:
-        cmd_parts = []
-        for part in cmd:
-            if ' ' in part or '\n' in part or '"' in part or "'" in part:
-                part = '"%s"' % part.replace('"', '\\"')
-            cmd_parts.append(part)
-        command_desc = ' '.join(cmd_parts)
-    if show_stdout:
-        stdout = None
-    else:
-        stdout = subprocess.PIPE
-    logger.log(command_level, "Running command %s" % command_desc)
-    env = os.environ.copy()
-    if extra_environ:
-        env.update(extra_environ)
-    try:
-        proc = subprocess.Popen(
-            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
-            cwd=cwd, env=env)
-    except Exception:
-        e = sys.exc_info()[1]
-        logger.fatal(
-            "Error %s while executing command %s" % (e, command_desc))
-        raise
-    all_output = []
-    if stdout is not None:
-        stdout = proc.stdout
-        while 1:
-            line = console_to_str(stdout.readline())
-            if not line:
-                break
-            line = line.rstrip()
-            all_output.append(line + '\n')
-            if filter_stdout:
-                level = filter_stdout(line)
-                if isinstance(level, tuple):
-                    level, line = level
-                logger.log(level, line)
-                if not logger.stdout_level_matches(level):
-                    logger.show_progress()
-            else:
-                logger.info(line)
-    else:
-        returned_stdout, returned_stderr = proc.communicate()
-        all_output = [returned_stdout or '']
-    proc.wait()
-    if proc.returncode:
-        if raise_on_returncode:
-            if all_output:
-                logger.notify('Complete output from command %s:' % command_desc)
-                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
-            raise InstallationError(
-                "Command %s failed with error code %s in %s"
-                % (command_desc, proc.returncode, cwd))
-        else:
-            logger.warn(
-                "Command %s had error code %s in %s"
-                % (command_desc, proc.returncode, cwd))
-    if stdout is not None:
-        return ''.join(all_output)
-
-
-if __name__ == '__main__':
-    exit = main()
-    if exit:
-        sys.exit(exit)
diff --git a/vendor/pip-1.2.1/pip/_pkgutil.py b/vendor/pip-1.2.1/pip/_pkgutil.py
deleted file mode 100644
index fe37d0401fdca559328a60f6ba9ecf4ac0d7d425..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/_pkgutil.py
+++ /dev/null
@@ -1,592 +0,0 @@
-"""Utilities to support packages."""
-
-# NOTE: This module must remain compatible with Python 2.3, as it is shared
-# by setuptools for distribution with Python 2.3 and up.
-
-import os
-import sys
-import imp
-import os.path
-from types import ModuleType
-
-__all__ = [
-    'get_importer', 'iter_importers', 'get_loader', 'find_loader',
-    'walk_packages', 'iter_modules',
-    'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
-]
-
-
-def read_code(stream):
-    # This helper is needed in order for the PEP 302 emulation to
-    # correctly handle compiled files
-    import marshal
-
-    magic = stream.read(4)
-    if magic != imp.get_magic():
-        return None
-
-    stream.read(4) # Skip timestamp
-    return marshal.load(stream)
-
-
-def simplegeneric(func):
-    """Make a trivial single-dispatch generic function"""
-    registry = {}
-
-    def wrapper(*args, **kw):
-        ob = args[0]
-        try:
-            cls = ob.__class__
-        except AttributeError:
-            cls = type(ob)
-        try:
-            mro = cls.__mro__
-        except AttributeError:
-            try:
-
-                class cls(cls, object):
-                    pass
-
-                mro = cls.__mro__[1:]
-            except TypeError:
-                mro = object,   # must be an ExtensionClass or some such  :(
-        for t in mro:
-            if t in registry:
-                return registry[t](*args, **kw)
-        else:
-            return func(*args, **kw)
-    try:
-        wrapper.__name__ = func.__name__
-    except (TypeError, AttributeError):
-        pass    # Python 2.3 doesn't allow functions to be renamed
-
-    def register(typ, func=None):
-        if func is None:
-            return lambda f: register(typ, f)
-        registry[typ] = func
-        return func
-
-    wrapper.__dict__ = func.__dict__
-    wrapper.__doc__ = func.__doc__
-    wrapper.register = register
-    return wrapper
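-
-# A minimal usage sketch for simplegeneric (names are illustrative only):
-#
-#     @simplegeneric
-#     def describe(obj):
-#         return 'object'
-#
-#     @describe.register(list)
-#     def _describe_list(obj):
-#         return 'list of %d items' % len(obj)
-#
-# describe([1, 2]) then dispatches on the argument's class via the registry.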
-
-
-def walk_packages(path=None, prefix='', onerror=None):
-    """Yields (module_loader, name, ispkg) for all modules recursively
-    on path, or, if path is None, all accessible modules.
-
-    'path' should be either None or a list of paths to look for
-    modules in.
-
-    'prefix' is a string to output on the front of every module name
-    on output.
-
-    Note that this function must import all *packages* (NOT all
-    modules!) on the given path, in order to access the __path__
-    attribute to find submodules.
-
-    'onerror' is a function which gets called with one argument (the
-    name of the package which was being imported) if any exception
-    occurs while trying to import a package.  If no onerror function is
-    supplied, ImportErrors are caught and ignored, while all other
-    exceptions are propagated, terminating the search.
-
-    Examples:
-
-    # list all modules python can access
-    walk_packages()
-
-    # list all submodules of ctypes
-    walk_packages(ctypes.__path__, ctypes.__name__+'.')
-    """
-
-    def seen(p, m={}):
-        if p in m:
-            return True
-        m[p] = True
-
-    for importer, name, ispkg in iter_modules(path, prefix):
-        yield importer, name, ispkg
-
-        if ispkg:
-            try:
-                __import__(name)
-            except ImportError:
-                if onerror is not None:
-                    onerror(name)
-            except Exception:
-                if onerror is not None:
-                    onerror(name)
-                else:
-                    raise
-            else:
-                path = getattr(sys.modules[name], '__path__', None) or []
-
-                # don't traverse path items we've seen before
-                path = [p for p in path if not seen(p)]
-
-                for item in walk_packages(path, name+'.', onerror):
-                    yield item
-
-
-def iter_modules(path=None, prefix=''):
-    """Yields (module_loader, name, ispkg) for all submodules on path,
-    or, if path is None, all top-level modules on sys.path.
-
-    'path' should be either None or a list of paths to look for
-    modules in.
-
-    'prefix' is a string to output on the front of every module name
-    on output.
-    """
-
-    if path is None:
-        importers = iter_importers()
-    else:
-        importers = map(get_importer, path)
-
-    yielded = {}
-    for i in importers:
-        for name, ispkg in iter_importer_modules(i, prefix):
-            if name not in yielded:
-                yielded[name] = 1
-                yield i, name, ispkg
-
-
-#@simplegeneric
-def iter_importer_modules(importer, prefix=''):
-    if not hasattr(importer, 'iter_modules'):
-        return []
-    return importer.iter_modules(prefix)
-
-iter_importer_modules = simplegeneric(iter_importer_modules)
-
-
-class ImpImporter:
-    """PEP 302 Importer that wraps Python's "classic" import algorithm
-
-    ImpImporter(dirname) produces a PEP 302 importer that searches that
-    directory.  ImpImporter(None) produces a PEP 302 importer that searches
-    the current sys.path, plus any modules that are frozen or built-in.
-
-    Note that ImpImporter does not currently support being used by placement
-    on sys.meta_path.
-    """
-
-    def __init__(self, path=None):
-        self.path = path
-
-    def find_module(self, fullname, path=None):
-        # Note: we ignore 'path' argument since it is only used via meta_path
-        subname = fullname.split(".")[-1]
-        if subname != fullname and self.path is None:
-            return None
-        if self.path is None:
-            path = None
-        else:
-            path = [os.path.realpath(self.path)]
-        try:
-            file, filename, etc = imp.find_module(subname, path)
-        except ImportError:
-            return None
-        return ImpLoader(fullname, file, filename, etc)
-
-    def iter_modules(self, prefix=''):
-        if self.path is None or not os.path.isdir(self.path):
-            return
-
-        yielded = {}
-        import inspect
-
-        filenames = os.listdir(self.path)
-        filenames.sort()  # handle packages before same-named modules
-
-        for fn in filenames:
-            modname = inspect.getmodulename(fn)
-            if modname=='__init__' or modname in yielded:
-                continue
-
-            path = os.path.join(self.path, fn)
-            ispkg = False
-
-            if not modname and os.path.isdir(path) and '.' not in fn:
-                modname = fn
-                for fn in os.listdir(path):
-                    subname = inspect.getmodulename(fn)
-                    if subname=='__init__':
-                        ispkg = True
-                        break
-                else:
-                    continue    # not a package
-
-            if modname and '.' not in modname:
-                yielded[modname] = 1
-                yield prefix + modname, ispkg
-
-
-class ImpLoader:
-    """PEP 302 Loader that wraps Python's "classic" import algorithm
-    """
-    code = source = None
-
-    def __init__(self, fullname, file, filename, etc):
-        self.file = file
-        self.filename = filename
-        self.fullname = fullname
-        self.etc = etc
-
-    def load_module(self, fullname):
-        self._reopen()
-        try:
-            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
-        finally:
-            if self.file:
-                self.file.close()
-        # Note: we don't set __loader__ because we want the module to look
-        # normal; i.e. this is just a wrapper for standard import machinery
-        return mod
-
-    def get_data(self, pathname):
-        return open(pathname, "rb").read()
-
-    def _reopen(self):
-        if self.file and self.file.closed:
-            mod_type = self.etc[2]
-            if mod_type==imp.PY_SOURCE:
-                self.file = open(self.filename, 'rU')
-            elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
-                self.file = open(self.filename, 'rb')
-
-    def _fix_name(self, fullname):
-        if fullname is None:
-            fullname = self.fullname
-        elif fullname != self.fullname:
-            raise ImportError("Loader for module %s cannot handle "
-                              "module %s" % (self.fullname, fullname))
-        return fullname
-
-    def is_package(self, fullname):
-        fullname = self._fix_name(fullname)
-        return self.etc[2]==imp.PKG_DIRECTORY
-
-    def get_code(self, fullname=None):
-        fullname = self._fix_name(fullname)
-        if self.code is None:
-            mod_type = self.etc[2]
-            if mod_type==imp.PY_SOURCE:
-                source = self.get_source(fullname)
-                self.code = compile(source, self.filename, 'exec')
-            elif mod_type==imp.PY_COMPILED:
-                self._reopen()
-                try:
-                    self.code = read_code(self.file)
-                finally:
-                    self.file.close()
-            elif mod_type==imp.PKG_DIRECTORY:
-                self.code = self._get_delegate().get_code()
-        return self.code
-
-    def get_source(self, fullname=None):
-        fullname = self._fix_name(fullname)
-        if self.source is None:
-            mod_type = self.etc[2]
-            if mod_type==imp.PY_SOURCE:
-                self._reopen()
-                try:
-                    self.source = self.file.read()
-                finally:
-                    self.file.close()
-            elif mod_type==imp.PY_COMPILED:
-                if os.path.exists(self.filename[:-1]):
-                    f = open(self.filename[:-1], 'rU')
-                    self.source = f.read()
-                    f.close()
-            elif mod_type==imp.PKG_DIRECTORY:
-                self.source = self._get_delegate().get_source()
-        return self.source
-
-    def _get_delegate(self):
-        return ImpImporter(self.filename).find_module('__init__')
-
-    def get_filename(self, fullname=None):
-        fullname = self._fix_name(fullname)
-        mod_type = self.etc[2]
-        if self.etc[2]==imp.PKG_DIRECTORY:
-            return self._get_delegate().get_filename()
-        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
-            return self.filename
-        return None
-
-
-try:
-    import zipimport
-    from zipimport import zipimporter
-
-    def iter_zipimport_modules(importer, prefix=''):
-        dirlist = list(zipimport._zip_directory_cache[importer.archive].keys())
-        dirlist.sort()
-        _prefix = importer.prefix
-        plen = len(_prefix)
-        yielded = {}
-        import inspect
-        for fn in dirlist:
-            if not fn.startswith(_prefix):
-                continue
-
-            fn = fn[plen:].split(os.sep)
-
-            if len(fn)==2 and fn[1].startswith('__init__.py'):
-                if fn[0] not in yielded:
-                    yielded[fn[0]] = 1
-                    yield fn[0], True
-
-            if len(fn)!=1:
-                continue
-
-            modname = inspect.getmodulename(fn[0])
-            if modname=='__init__':
-                continue
-
-            if modname and '.' not in modname and modname not in yielded:
-                yielded[modname] = 1
-                yield prefix + modname, False
-
-    iter_importer_modules.register(zipimporter, iter_zipimport_modules)
-
-except ImportError:
-    pass
-
-
-def get_importer(path_item):
-    """Retrieve a PEP 302 importer for the given path item
-
-    The returned importer is cached in sys.path_importer_cache
-    if it was newly created by a path hook.
-
-    If there is no importer, a wrapper around the basic import
-    machinery is returned. This wrapper is never inserted into
-    the importer cache (None is inserted instead).
-
-    The cache (or part of it) can be cleared manually if a
-    rescan of sys.path_hooks is necessary.
-    """
-    try:
-        importer = sys.path_importer_cache[path_item]
-    except KeyError:
-        for path_hook in sys.path_hooks:
-            try:
-                importer = path_hook(path_item)
-                break
-            except ImportError:
-                pass
-        else:
-            importer = None
-        sys.path_importer_cache.setdefault(path_item, importer)
-
-    if importer is None:
-        try:
-            importer = ImpImporter(path_item)
-        except ImportError:
-            importer = None
-    return importer
-
-
-def iter_importers(fullname=""):
-    """Yield PEP 302 importers for the given module name
-
-    If fullname contains a '.', the importers will be for the package
-    containing fullname, otherwise they will be importers for sys.meta_path,
-    sys.path, and Python's "classic" import machinery, in that order.  If
-    the named module is in a package, that package is imported as a side
-    effect of invoking this function.
-
-    Non PEP 302 mechanisms (e.g. the Windows registry) used by the
-    standard import machinery to find files in alternative locations
-    are partially supported, but are searched AFTER sys.path. Normally,
-    these locations are searched BEFORE sys.path, preventing sys.path
-    entries from shadowing them.
-
-    For this to cause a visible difference in behaviour, there must
-    be a module or package name that is accessible via both sys.path
-    and one of the non PEP 302 file system mechanisms. In this case,
-    the emulation will find the former version, while the builtin
-    import mechanism will find the latter.
-
-    Items of the following types can be affected by this discrepancy:
-        imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
-    """
-    if fullname.startswith('.'):
-        raise ImportError("Relative module names not supported")
-    if '.' in fullname:
-        # Get the containing package's __path__
-        pkg = '.'.join(fullname.split('.')[:-1])
-        if pkg not in sys.modules:
-            __import__(pkg)
-        path = getattr(sys.modules[pkg], '__path__', None) or []
-    else:
-        for importer in sys.meta_path:
-            yield importer
-        path = sys.path
-    for item in path:
-        yield get_importer(item)
-    if '.' not in fullname:
-        yield ImpImporter()
-
-
-def get_loader(module_or_name):
-    """Get a PEP 302 "loader" object for module_or_name
-
-    If the module or package is accessible via the normal import
-    mechanism, a wrapper around the relevant part of that machinery
-    is returned.  Returns None if the module cannot be found or imported.
-    If the named module is not already imported, its containing package
-    (if any) is imported, in order to establish the package __path__.
-
-    This function uses iter_importers(), and is thus subject to the same
-    limitations regarding platform-specific special import locations such
-    as the Windows registry.
-    """
-    if module_or_name in sys.modules:
-        module_or_name = sys.modules[module_or_name]
-    if isinstance(module_or_name, ModuleType):
-        module = module_or_name
-        loader = getattr(module, '__loader__', None)
-        if loader is not None:
-            return loader
-        fullname = module.__name__
-    else:
-        fullname = module_or_name
-    return find_loader(fullname)
-
-
-def find_loader(fullname):
-    """Find a PEP 302 "loader" object for fullname
-
-    If fullname contains dots, path must be the containing package's __path__.
-    Returns None if the module cannot be found or imported. This function uses
-    iter_importers(), and is thus subject to the same limitations regarding
-    platform-specific special import locations such as the Windows registry.
-    """
-    for importer in iter_importers(fullname):
-        loader = importer.find_module(fullname)
-        if loader is not None:
-            return loader
-
-    return None
-
-
-def extend_path(path, name):
-    """Extend a package's path.
-
-    Intended use is to place the following code in a package's __init__.py:
-
-        from pkgutil import extend_path
-        __path__ = extend_path(__path__, __name__)
-
-    This will add to the package's __path__ all subdirectories of
-    directories on sys.path named after the package.  This is useful
-    if one wants to distribute different parts of a single logical
-    package as multiple directories.
-
-    It also looks for *.pkg files beginning where * matches the name
-    argument.  This feature is similar to *.pth files (see site.py),
-    except that it doesn't special-case lines starting with 'import'.
-    A *.pkg file is trusted at face value: apart from checking for
-    duplicates, all entries found in a *.pkg file are added to the
-    path, regardless of whether they exist on the filesystem.  (This
-    is a feature.)
-
-    If the input path is not a list (as is the case for frozen
-    packages) it is returned unchanged.  The input path is not
-    modified; an extended copy is returned.  Items are only appended
-    to the copy at the end.
-
-    It is assumed that sys.path is a sequence.  Items of sys.path that
-    are not (unicode or 8-bit) strings referring to existing
-    directories are ignored.  Unicode items of sys.path that cause
-    errors when used as filenames may cause this function to raise an
-    exception (in line with os.path.isdir() behavior).
-    """
-
-    if not isinstance(path, list):
-        # This could happen e.g. when this is called from inside a
-        # frozen package.  Return the path unchanged in that case.
-        return path
-
-    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
-    # Just in case os.extsep != '.'
-    sname = os.extsep.join(name.split('.'))
-    sname_pkg = sname + os.extsep + "pkg"
-    init_py = "__init__" + os.extsep + "py"
-
-    path = path[:] # Start with a copy of the existing path
-
-    from pip.backwardcompat import string_types
-
-    for dir in sys.path:
-        if not isinstance(dir, string_types) or not os.path.isdir(dir):
-            continue
-        subdir = os.path.join(dir, pname)
-        # XXX This may still add duplicate entries to path on
-        # case-insensitive filesystems
-        initfile = os.path.join(subdir, init_py)
-        if subdir not in path and os.path.isfile(initfile):
-            path.append(subdir)
-        # XXX Is this the right thing for subpackages like zope.app?
-        # It looks for a file named "zope.app.pkg"
-        pkgfile = os.path.join(dir, sname_pkg)
-        if os.path.isfile(pkgfile):
-            try:
-                f = open(pkgfile)
-            except IOError:
-                msg = sys.exc_info()[1]
-                sys.stderr.write("Can't open %s: %s\n" %
-                                 (pkgfile, msg))
-            else:
-                for line in f:
-                    line = line.rstrip('\n')
-                    if not line or line.startswith('#'):
-                        continue
-                    path.append(line) # Don't check for existence!
-                f.close()
-
-    return path
-
-
-def get_data(package, resource):
-    """Get a resource from a package.
-
-    This is a wrapper round the PEP 302 loader get_data API. The package
-    argument should be the name of a package, in standard module format
-    (foo.bar). The resource argument should be in the form of a relative
-    filename, using '/' as the path separator. The parent directory name '..'
-    is not allowed, nor is a rooted name (starting with a '/').
-
-    The function returns a binary string, which is the contents of the
-    specified resource.
-
-    For packages located in the filesystem, which have already been imported,
-    this is the rough equivalent of
-
-        d = os.path.dirname(sys.modules[package].__file__)
-        data = open(os.path.join(d, resource), 'rb').read()
-
-    If the package cannot be located or loaded, or it uses a PEP 302 loader
-    which does not support get_data(), then None is returned.
-    """
-
-    loader = get_loader(package)
-    if loader is None or not hasattr(loader, 'get_data'):
-        return None
-    mod = sys.modules.get(package) or loader.load_module(package)
-    if mod is None or not hasattr(mod, '__file__'):
-        return None
-
-    # Modify the resource name to be compatible with the loader.get_data
-    # signature - an os.path format "filename" starting with the dirname of
-    # the package's __file__
-    parts = resource.split('/')
-    parts.insert(0, os.path.dirname(mod.__file__))
-    resource_name = os.path.join(*parts)
-    return loader.get_data(resource_name)
diff --git a/vendor/pip-1.2.1/pip/backwardcompat.py b/vendor/pip-1.2.1/pip/backwardcompat.py
deleted file mode 100644
index e33da98960484a0b75e9edd930d2050fe8651a43..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/backwardcompat.py
+++ /dev/null
@@ -1,135 +0,0 @@
-"""Stuff that differs in different Python versions"""
-
-import sys
-import os
-import shutil
-
-__all__ = ['any', 'WindowsError', 'md5', 'copytree']
-
-try:
-    WindowsError = WindowsError
-except NameError:
-    class NeverUsedException(Exception):
-        """this exception should never be raised"""
-    WindowsError = NeverUsedException
-try:
-    from hashlib import md5
-except ImportError:
-    import md5 as md5_module
-    md5 = md5_module.new
-
-try:
-    from pkgutil import walk_packages
-except ImportError:
-    # let's fall back as long as we can
-    from pip._pkgutil import walk_packages
-
-try:
-    any = any
-except NameError:
-
-    def any(seq):
-        for item in seq:
-            if item:
-                return True
-        return False
-
-console_encoding = sys.__stdout__.encoding
-
-if sys.version_info >= (3,):
-    from io import StringIO, BytesIO
-    from functools import reduce
-    from urllib.error import URLError, HTTPError
-    from queue import Queue, Empty
-    from urllib.request import url2pathname
-    from urllib.request import urlretrieve
-    from email import message as emailmessage
-    import urllib.parse as urllib
-    import urllib.request as urllib2
-    import configparser as ConfigParser
-    import xmlrpc.client as xmlrpclib
-    import urllib.parse as urlparse
-    import http.client as httplib
-
-    def cmp(a, b):
-        return (a > b) - (a < b)
-
-    def b(s):
-        return s.encode('utf-8')
-
-    def u(s):
-        return s.decode('utf-8')
-
-    def console_to_str(s):
-        try:
-            return s.decode(console_encoding)
-        except UnicodeDecodeError:
-            return s.decode('utf_8')
-
-    def fwrite(f, s):
-        f.buffer.write(b(s))
-
-    bytes = bytes
-    string_types = (str,)
-    raw_input = input
-else:
-    from cStringIO import StringIO
-    from urllib2 import URLError, HTTPError
-    from Queue import Queue, Empty
-    from urllib import url2pathname, urlretrieve
-    from email import Message as emailmessage
-    import urllib
-    import urllib2
-    import urlparse
-    import ConfigParser
-    import xmlrpclib
-    import httplib
-
-    def b(s):
-        return s
-
-    def u(s):
-        return s
-
-    def console_to_str(s):
-        return s
-
-    def fwrite(f, s):
-        f.write(s)
-
-    bytes = str
-    string_types = (basestring,)
-    reduce = reduce
-    cmp = cmp
-    raw_input = raw_input
-    BytesIO = StringIO
-
-try:
-    from email.parser import FeedParser
-except ImportError:
-    # Python older than 2.5
-    from email.FeedParser import FeedParser
-
-from distutils.sysconfig import get_python_lib, get_python_version
-
-
-def copytree(src, dst):
-    if sys.version_info < (2, 5):
-        before_last_dir = os.path.dirname(dst)
-        if not os.path.exists(before_last_dir):
-            os.makedirs(before_last_dir)
-        shutil.copytree(src, dst)
-        shutil.copymode(src, dst)
-    else:
-        shutil.copytree(src, dst)
-
-
-def product(*args, **kwds):
-    # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
-    # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
-    pools = list(map(tuple, args)) * kwds.get('repeat', 1)
-    result = [[]]
-    for pool in pools:
-        result = [x+[y] for x in result for y in pool]
-    for prod in result:
-        yield tuple(prod)
diff --git a/vendor/pip-1.2.1/pip/basecommand.py b/vendor/pip-1.2.1/pip/basecommand.py
deleted file mode 100644
index 12bcd6211082785f21968d4b8e73222702e9552d..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/basecommand.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""Base Command class, and related routines"""
-
-import os
-import socket
-import sys
-import traceback
-import time
-
-from pip import commands
-from pip.log import logger
-from pip.baseparser import parser, ConfigOptionParser, UpdatingDefaultsHelpFormatter
-from pip.download import urlopen
-from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
-                            CommandError)
-from pip.backwardcompat import StringIO, walk_packages
-from pip.status_codes import SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND
-
-
-__all__ = ['command_dict', 'Command', 'load_all_commands',
-           'load_command', 'command_names']
-
-command_dict = {}
-
-# for backwards compatibility
-get_proxy = urlopen.get_proxy
-
-
-class Command(object):
-    name = None
-    usage = None
-    hidden = False
-
-    def __init__(self):
-        assert self.name
-        self.parser = ConfigOptionParser(
-            usage=self.usage,
-            prog='%s %s' % (sys.argv[0], self.name),
-            version=parser.version,
-            formatter=UpdatingDefaultsHelpFormatter(),
-            name=self.name)
-        for option in parser.option_list:
-            if not option.dest or option.dest == 'help':
-                # -h, --version, etc
-                continue
-            self.parser.add_option(option)
-        command_dict[self.name] = self
-
-    def merge_options(self, initial_options, options):
-        # Make sure we have all global options carried over
-        for attr in ['log', 'proxy', 'require_venv',
-                     'log_explicit_levels', 'log_file',
-                     'timeout', 'default_vcs',
-                     'skip_requirements_regex',
-                     'no_input', 'exists_action']:
-            setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
-        options.quiet += initial_options.quiet
-        options.verbose += initial_options.verbose
-
-    def setup_logging(self):
-        pass
-
-    def main(self, args, initial_options):
-        options, args = self.parser.parse_args(args)
-        self.merge_options(initial_options, options)
-
-        level = 1 # Notify
-        level += options.verbose
-        level -= options.quiet
-        level = logger.level_for_integer(4-level)
-        complete_log = []
-        logger.consumers.extend(
-            [(level, sys.stdout),
-             (logger.DEBUG, complete_log.append)])
-        if options.log_explicit_levels:
-            logger.explicit_levels = True
-
-        self.setup_logging()
-
-        if options.no_input:
-            os.environ['PIP_NO_INPUT'] = '1'
-
-        if options.exists_action:
-            os.environ['PIP_EXISTS_ACTION'] = ''.join(options.exists_action)
-
-        if options.require_venv:
-            # If a venv is required check if it can really be found
-            if not os.environ.get('VIRTUAL_ENV'):
-                logger.fatal('Could not find an activated virtualenv (required).')
-                sys.exit(VIRTUALENV_NOT_FOUND)
-
-        if options.log:
-            log_fp = open_logfile(options.log, 'a')
-            logger.consumers.append((logger.DEBUG, log_fp))
-        else:
-            log_fp = None
-
-        socket.setdefaulttimeout(options.timeout or None)
-
-        urlopen.setup(proxystr=options.proxy, prompting=not options.no_input)
-
-        exit = SUCCESS
-        store_log = False
-        try:
-            status = self.run(options, args)
-            # FIXME: all commands should return an exit status
-            # and when it is done, isinstance is not needed anymore
-            if isinstance(status, int):
-                exit = status
-        except (InstallationError, UninstallationError):
-            e = sys.exc_info()[1]
-            logger.fatal(str(e))
-            logger.info('Exception information:\n%s' % format_exc())
-            store_log = True
-            exit = ERROR
-        except BadCommand:
-            e = sys.exc_info()[1]
-            logger.fatal(str(e))
-            logger.info('Exception information:\n%s' % format_exc())
-            store_log = True
-            exit = ERROR
-        except CommandError:
-            e = sys.exc_info()[1]
-            logger.fatal('ERROR: %s' % e)
-            logger.info('Exception information:\n%s' % format_exc())
-            exit = ERROR
-        except KeyboardInterrupt:
-            logger.fatal('Operation cancelled by user')
-            logger.info('Exception information:\n%s' % format_exc())
-            store_log = True
-            exit = ERROR
-        except:
-            logger.fatal('Exception:\n%s' % format_exc())
-            store_log = True
-            exit = UNKNOWN_ERROR
-        if log_fp is not None:
-            log_fp.close()
-        if store_log:
-            log_fn = options.log_file
-            text = '\n'.join(complete_log)
-            logger.fatal('Storing complete log in %s' % log_fn)
-            log_fp = open_logfile(log_fn, 'w')
-            log_fp.write(text)
-            log_fp.close()
-        return exit
-
-
-def format_exc(exc_info=None):
-    if exc_info is None:
-        exc_info = sys.exc_info()
-    out = StringIO()
-    traceback.print_exception(*exc_info, **dict(file=out))
-    return out.getvalue()
-
-
-def open_logfile(filename, mode='a'):
-    """Open the named log file in the given mode (append by default).
-
-    If the file already exists, a separator will also be printed to
-    the file to separate past activity from current activity.
-    """
-    filename = os.path.expanduser(filename)
-    filename = os.path.abspath(filename)
-    dirname = os.path.dirname(filename)
-    if not os.path.exists(dirname):
-        os.makedirs(dirname)
-    exists = os.path.exists(filename)
-
-    log_fp = open(filename, mode)
-    if exists:
-        log_fp.write('%s\n' % ('-'*60))
-        log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
-    return log_fp
-
-
-def load_command(name):
-    full_name = 'pip.commands.%s' % name
-    if full_name in sys.modules:
-        return
-    try:
-        __import__(full_name)
-    except ImportError:
-        pass
-
-
-def load_all_commands():
-    for name in command_names():
-        load_command(name)
-
-
-def command_names():
-    names = set((pkg[1] for pkg in walk_packages(path=commands.__path__)))
-    return list(names)
-
diff --git a/vendor/pip-1.2.1/pip/baseparser.py b/vendor/pip-1.2.1/pip/baseparser.py
deleted file mode 100644
index b3864f3dab1cb759d7e96c5625bd19f31a4e1fa2..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/baseparser.py
+++ /dev/null
@@ -1,226 +0,0 @@
-"""Base option parser setup"""
-
-import sys
-import optparse
-import pkg_resources
-import os
-from distutils.util import strtobool
-from pip.backwardcompat import ConfigParser, string_types
-from pip.locations import default_config_file, default_log_file
-
-
-class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
-    """Custom help formatter for use in ConfigOptionParser that updates
-    the defaults before expanding them, allowing them to show up correctly
-    in the help listing"""
-
-    def expand_default(self, option):
-        if self.parser is not None:
-            self.parser.update_defaults(self.parser.defaults)
-        return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class ConfigOptionParser(optparse.OptionParser):
-    """Custom option parser which updates its defaults by checking the
-    configuration files and environment variables"""
-
-    def __init__(self, *args, **kwargs):
-        self.config = ConfigParser.RawConfigParser()
-        self.name = kwargs.pop('name')
-        self.files = self.get_config_files()
-        self.config.read(self.files)
-        assert self.name
-        optparse.OptionParser.__init__(self, *args, **kwargs)
-
-    def get_config_files(self):
-        config_file = os.environ.get('PIP_CONFIG_FILE', False)
-        if config_file and os.path.exists(config_file):
-            return [config_file]
-        return [default_config_file]
-
-    def update_defaults(self, defaults):
-        """Updates the given defaults with values from the config files and
-        the environ. Does a little special handling for certain types of
-        options (lists)."""
-        # Then go and look for the other sources of configuration:
-        config = {}
-        # 1. config files
-        for section in ('global', self.name):
-            config.update(self.normalize_keys(self.get_config_section(section)))
-        # 2. environmental variables
-        config.update(self.normalize_keys(self.get_environ_vars()))
-        # Then set the options with those values
-        for key, val in config.items():
-            option = self.get_option(key)
-            if option is not None:
-                # ignore empty values
-                if not val:
-                    continue
-                # handle multiline configs
-                if option.action == 'append':
-                    val = val.split()
-                else:
-                    option.nargs = 1
-                if option.action in ('store_true', 'store_false', 'count'):
-                    val = strtobool(val)
-                try:
-                    val = option.convert_value(key, val)
-                except optparse.OptionValueError:
-                    e = sys.exc_info()[1]
-                    print("An error occurred during configuration: %s" % e)
-                    sys.exit(3)
-                defaults[option.dest] = val
-        return defaults
-
-    def normalize_keys(self, items):
-        """Return a config dictionary with normalized keys regardless of
-        whether the keys were specified in environment variables or in config
-        files"""
-        normalized = {}
-        for key, val in items:
-            key = key.replace('_', '-')
-            if not key.startswith('--'):
-                key = '--%s' % key # only prefer long opts
-            normalized[key] = val
-        return normalized
-
-    def get_config_section(self, name):
-        """Get a section of a configuration"""
-        if self.config.has_section(name):
-            return self.config.items(name)
-        return []
-
-    def get_environ_vars(self, prefix='PIP_'):
-        """Return a generator over all environment variables with prefix PIP_"""
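-        # e.g. PIP_TIMEOUT=60 in the environment yields ('timeout', '60');
-        # normalize_keys() later turns that into the '--timeout' option string.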
-        for key, val in os.environ.items():
-            if key.startswith(prefix):
-                yield (key.replace(prefix, '').lower(), val)
-
-    def get_default_values(self):
-        """Overridden to make updating the defaults after instantiation of
-        the option parser possible; update_defaults() does the dirty work."""
-        if not self.process_default_values:
-            # Old, pre-Optik 1.5 behaviour.
-            return optparse.Values(self.defaults)
-
-        defaults = self.update_defaults(self.defaults.copy()) # ours
-        for option in self._get_all_options():
-            default = defaults.get(option.dest)
-            if isinstance(default, string_types):
-                opt_str = option.get_opt_string()
-                defaults[option.dest] = option.check_value(opt_str, default)
-        return optparse.Values(defaults)
-
-try:
-    pip_dist = pkg_resources.get_distribution('pip')
-    version = '%s from %s (python %s)' % (
-        pip_dist, pip_dist.location, sys.version[:3])
-except pkg_resources.DistributionNotFound:
-    # when running pip.py without installing
-    version=None
-
-parser = ConfigOptionParser(
-    usage='%prog COMMAND [OPTIONS]',
-    version=version,
-    add_help_option=False,
-    formatter=UpdatingDefaultsHelpFormatter(),
-    name='global')
-
-parser.add_option(
-    '-h', '--help',
-    dest='help',
-    action='store_true',
-    help='Show help')
-parser.add_option(
-    # Run only if inside a virtualenv, bail if not.
-    '--require-virtualenv', '--require-venv',
-    dest='require_venv',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
-    '-v', '--verbose',
-    dest='verbose',
-    action='count',
-    default=0,
-    help='Give more output')
-parser.add_option(
-    '-q', '--quiet',
-    dest='quiet',
-    action='count',
-    default=0,
-    help='Give less output')
-parser.add_option(
-    '--log',
-    dest='log',
-    metavar='FILENAME',
-    help='Log file where a complete (maximum verbosity) record will be kept')
-parser.add_option(
-    # Write the log levels explicitly to the log
-    '--log-explicit-levels',
-    dest='log_explicit_levels',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # The default log file
-    '--local-log', '--log-file',
-    dest='log_file',
-    metavar='FILENAME',
-    default=default_log_file,
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # Don't ask for input
-    '--no-input',
-    dest='no_input',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
-    '--proxy',
-    dest='proxy',
-    type='str',
-    default='',
-    help="Specify a proxy in the form user:passwd@proxy.server:port. "
-    "Note that the user:password@ is optional and required only if you "
-    "are behind an authenticated proxy.  If you provide "
-    "user@proxy.server:port then you will be prompted for a password.")
-parser.add_option(
-    '--timeout', '--default-timeout',
-    metavar='SECONDS',
-    dest='timeout',
-    type='float',
-    default=15,
-    help='Set the socket timeout (default %default seconds)')
-parser.add_option(
-    # The default version control system for editables, e.g. 'svn'
-    '--default-vcs',
-    dest='default_vcs',
-    type='str',
-    default='',
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # A regex to be used to skip requirements
-    '--skip-requirements-regex',
-    dest='skip_requirements_regex',
-    type='str',
-    default='',
-    help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
-    # Option when path already exist
-    '--exists-action',
-    dest='exists_action',
-    type='choice',
-    choices=['s', 'i', 'w', 'b'],
-    default=[],
-    action='append',
-    help="Default action when a path already exists. "
-         "Use this option more than once to specify "
-         "another action if a certain option is not "
-         "available; choices: "
-         "(s)witch, (i)gnore, (w)ipe, (b)ackup")
-
-parser.disable_interspersed_args()
diff --git a/vendor/pip-1.2.1/pip/commands/__init__.py b/vendor/pip-1.2.1/pip/commands/__init__.py
deleted file mode 100644
index 792d6005489ebee62cde02066f19c5521e620451..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-#
diff --git a/vendor/pip-1.2.1/pip/commands/bundle.py b/vendor/pip-1.2.1/pip/commands/bundle.py
deleted file mode 100644
index f782f1bc315c5795751518f1219139d40a230fb0..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/bundle.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from pip.locations import build_prefix, src_prefix
-from pip.util import display_path, backup_dir
-from pip.log import logger
-from pip.exceptions import InstallationError
-from pip.commands.install import InstallCommand
-
-
-class BundleCommand(InstallCommand):
-    name = 'bundle'
-    usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
-    summary = 'Create pybundles (archives containing multiple packages)'
-    bundle = True
-
-    def __init__(self):
-        super(BundleCommand, self).__init__()
-        # bundle uses different default source and build dirs
-        build_opt = self.parser.get_option("--build")
-        build_opt.default = backup_dir(build_prefix, '-bundle')
-        src_opt = self.parser.get_option("--src")
-        src_opt.default = backup_dir(src_prefix, '-bundle')
-        self.parser.set_defaults(**{
-                src_opt.dest: src_opt.default,
-                build_opt.dest: build_opt.default,
-                })
-
-    def run(self, options, args):
-        if not args:
-            raise InstallationError('You must give a bundle filename')
-        # We have to get everything when creating a bundle:
-        options.ignore_installed = True
-        logger.notify('Putting temporary build files in %s and source/develop files in %s'
-                      % (display_path(options.build_dir), display_path(options.src_dir)))
-        self.bundle_filename = args.pop(0)
-        requirement_set = super(BundleCommand, self).run(options, args)
-        return requirement_set
-
-
-BundleCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/completion.py b/vendor/pip-1.2.1/pip/commands/completion.py
deleted file mode 100644
index 5b93d9cefe9efae9c94d53ad953cbdd84c9638e1..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/completion.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import sys
-from pip.basecommand import Command
-
-BASE_COMPLETION = """
-# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
-"""
-
-COMPLETION_SCRIPTS = {
-    'bash': """
-_pip_completion()
-{
-    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
-                   COMP_CWORD=$COMP_CWORD \\
-                   PIP_AUTO_COMPLETE=1 $1 ) )
-}
-complete -o default -F _pip_completion pip
-""", 'zsh': """
-function _pip_completion {
-  local words cword
-  read -Ac words
-  read -cn cword
-  reply=( $( COMP_WORDS="$words[*]" \\
-             COMP_CWORD=$(( cword-1 )) \\
-             PIP_AUTO_COMPLETE=1 $words[1] ) )
-}
-compctl -K _pip_completion pip
-"""}
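-
-# The emitted script is meant to be sourced by the user's shell; a typical
-# (suggested, not enforced) way to enable it permanently:
-#
-#     pip completion --bash >> ~/.bashrc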
-
-
-class CompletionCommand(Command):
-    name = 'completion'
-    summary = 'A helper command to be used for command completion'
-    hidden = True
-
-    def __init__(self):
-        super(CompletionCommand, self).__init__()
-        self.parser.add_option(
-            '--bash', '-b',
-            action='store_const',
-            const='bash',
-            dest='shell',
-            help='Emit completion code for bash')
-        self.parser.add_option(
-            '--zsh', '-z',
-            action='store_const',
-            const='zsh',
-            dest='shell',
-            help='Emit completion code for zsh')
-
-    def run(self, options, args):
-        """Prints the completion code of the given shell"""
-        shells = COMPLETION_SCRIPTS.keys()
-        shell_options = ['--'+shell for shell in sorted(shells)]
-        if options.shell in shells:
-            script = COMPLETION_SCRIPTS.get(options.shell, '')
-            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
-        else:
-            sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
-
-CompletionCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/freeze.py b/vendor/pip-1.2.1/pip/commands/freeze.py
deleted file mode 100644
index 03ac80f55d1bab7ee2daa43050f213aa42a5df4e..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/freeze.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import re
-import sys
-import pkg_resources
-import pip
-from pip.req import InstallRequirement
-from pip.log import logger
-from pip.basecommand import Command
-from pip.util import get_installed_distributions
-
-
-class FreezeCommand(Command):
-    name = 'freeze'
-    usage = '%prog [OPTIONS]'
-    summary = 'Output all currently installed packages (exact versions) to stdout'
-
-    def __init__(self):
-        super(FreezeCommand, self).__init__()
-        self.parser.add_option(
-            '-r', '--requirement',
-            dest='requirement',
-            action='store',
-            default=None,
-            metavar='FILENAME',
-            help='Use the given requirements file as a hint about how to generate the new frozen requirements')
-        self.parser.add_option(
-            '-f', '--find-links',
-            dest='find_links',
-            action='append',
-            default=[],
-            metavar='URL',
-            help='URL for finding packages, which will be added to the frozen requirements file')
-        self.parser.add_option(
-            '-l', '--local',
-            dest='local',
-            action='store_true',
-            default=False,
-            help='If in a virtualenv, do not report globally-installed packages')
-
-    def setup_logging(self):
-        logger.move_stdout_to_stderr()
-
-    def run(self, options, args):
-        requirement = options.requirement
-        find_links = options.find_links or []
-        local_only = options.local
-        ## FIXME: Obviously this should be settable:
-        find_tags = False
-        skip_match = None
-
-        skip_regex = options.skip_requirements_regex
-        if skip_regex:
-            skip_match = re.compile(skip_regex)
-
-        dependency_links = []
-
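-        # setup_logging() redirected log output to stderr, so stdout carries
-        # nothing but the frozen requirements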
-        f = sys.stdout
-
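-        # collect dependency links advertised by installed distributions, plus any
-        # --find-links URL that pins a specific project via #egg=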
-        for dist in pkg_resources.working_set:
-            if dist.has_metadata('dependency_links.txt'):
-                dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
-        for link in find_links:
-            if '#egg=' in link:
-                dependency_links.append(link)
-        for link in find_links:
-            f.write('-f %s\n' % link)
-        installations = {}
-        for dist in get_installed_distributions(local_only=local_only):
-            req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
-            installations[req.name] = req
-        if requirement:
-            req_f = open(requirement)
-            for line in req_f:
-                if not line.strip() or line.strip().startswith('#'):
-                    f.write(line)
-                    continue
-                if skip_match and skip_match.search(line):
-                    f.write(line)
-                    continue
-                elif line.startswith('-e') or line.startswith('--editable'):
-                    if line.startswith('-e'):
-                        line = line[2:].strip()
-                    else:
-                        line = line[len('--editable'):].strip().lstrip('=')
-                    line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
-                elif (line.startswith('-r') or line.startswith('--requirement')
-                      or line.startswith('-Z') or line.startswith('--always-unzip')
-                      or line.startswith('-f') or line.startswith('-i')
-                      or line.startswith('--extra-index-url')
-                      or line.startswith('--find-links')
-                      or line.startswith('--index-url')):
-                    f.write(line)
-                    continue
-                else:
-                    line_req = InstallRequirement.from_line(line)
-                if not line_req.name:
-                    logger.notify("Skipping line because it's not clear what it would install: %s"
-                                  % line.strip())
-                    logger.notify("  (add #egg=PackageName to the URL to avoid this warning)")
-                    continue
-                if line_req.name not in installations:
-                    logger.warn("Requirement file contains %s, but that package is not installed"
-                                % line.strip())
-                    continue
-                f.write(str(installations[line_req.name]))
-                del installations[line_req.name]
-            f.write('## The following requirements were added by pip --freeze:\n')
-        for installation in sorted(installations.values(), key=lambda x: x.name):
-            f.write(str(installation))
-
-
-FreezeCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/help.py b/vendor/pip-1.2.1/pip/commands/help.py
deleted file mode 100644
index 4d504c521bb9d70af9474a16439fb13b06a5217a..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/help.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from pip.basecommand import (Command, command_dict,
-                             load_all_commands, SUCCESS,
-                             ERROR)
-from pip.exceptions import CommandError
-from pip.baseparser import parser
-
-
-class HelpCommand(Command):
-    name = 'help'
-    usage = '%prog'
-    summary = 'Show available commands'
-
-    def run(self, options, args):
-        load_all_commands()
-        if args:
-            ## FIXME: handle errors better here
-            command = args[0]
-            if command not in command_dict:
-                raise CommandError('No command with the name: %s' % command)
-            command = command_dict[command]
-            command.parser.print_help()
-            return SUCCESS
-        parser.print_help()
-        print('\nCommands available:')
-        commands = list(set(command_dict.values()))
-        commands.sort(key=lambda x: x.name)
-        for command in commands:
-            if command.hidden:
-                continue
-            print('  %s: %s' % (command.name, command.summary))
-        return SUCCESS
-
-HelpCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/install.py b/vendor/pip-1.2.1/pip/commands/install.py
deleted file mode 100644
index 925d57feb4a3203fec57f316408eb3873cce3d97..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/install.py
+++ /dev/null
@@ -1,279 +0,0 @@
-import os
-import sys
-import tempfile
-import shutil
-from pip.req import InstallRequirement, RequirementSet
-from pip.req import parse_requirements
-from pip.log import logger
-from pip.locations import build_prefix, src_prefix
-from pip.basecommand import Command
-from pip.index import PackageFinder
-from pip.exceptions import InstallationError, CommandError
-
-
-class InstallCommand(Command):
-    name = 'install'
-    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
-    summary = 'Install packages'
-    bundle = False
-
-    def __init__(self):
-        super(InstallCommand, self).__init__()
-        self.parser.add_option(
-            '-e', '--editable',
-            dest='editables',
-            action='append',
-            default=[],
-            metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
-            help='Install a package directly from a checkout. Source will be checked '
-            'out into src/PACKAGE (lower-case) and installed in-place (using '
-            'setup.py develop). You can run this on an existing directory/checkout (like '
-            'pip install -e src/mycheckout). This option may be provided multiple times. '
-            'Possible values for VCS are: svn, git, hg and bzr.')
-        self.parser.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
-            default=[],
-            metavar='FILENAME',
-            help='Install all the packages listed in the given requirements file.  '
-            'This option can be used multiple times.')
-        self.parser.add_option(
-            '-f', '--find-links',
-            dest='find_links',
-            action='append',
-            default=[],
-            metavar='URL',
-            help='URL to look for packages at')
-        self.parser.add_option(
-            '-i', '--index-url', '--pypi-url',
-            dest='index_url',
-            metavar='URL',
-            default='http://pypi.python.org/simple/',
-            help='Base URL of Python Package Index (default %default)')
-        self.parser.add_option(
-            '--extra-index-url',
-            dest='extra_index_urls',
-            metavar='URL',
-            action='append',
-            default=[],
-            help='Extra URLs of package indexes to use in addition to --index-url')
-        self.parser.add_option(
-            '--no-index',
-            dest='no_index',
-            action='store_true',
-            default=False,
-            help='Ignore package index (only looking at --find-links URLs instead)')
-        self.parser.add_option(
-            '-M', '--use-mirrors',
-            dest='use_mirrors',
-            action='store_true',
-            default=False,
-            help='Use the PyPI mirrors as a fallback in case the main index is down.')
-        self.parser.add_option(
-            '--mirrors',
-            dest='mirrors',
-            metavar='URL',
-            action='append',
-            default=[],
-            help='Specific mirror URLs to query when --use-mirrors is used')
-
-        self.parser.add_option(
-            '-b', '--build', '--build-dir', '--build-directory',
-            dest='build_dir',
-            metavar='DIR',
-            default=build_prefix,
-            help='Unpack packages into DIR (default %default) and build from there')
-        self.parser.add_option(
-            '-t', '--target',
-            dest='target_dir',
-            metavar='DIR',
-            default=None,
-            help='Install packages into DIR.')
-        self.parser.add_option(
-            '-d', '--download', '--download-dir', '--download-directory',
-            dest='download_dir',
-            metavar='DIR',
-            default=None,
-            help='Download packages into DIR instead of installing them')
-        self.parser.add_option(
-            '--download-cache',
-            dest='download_cache',
-            metavar='DIR',
-            default=None,
-            help='Cache downloaded packages in DIR')
-        self.parser.add_option(
-            '--src', '--source', '--source-dir', '--source-directory',
-            dest='src_dir',
-            metavar='DIR',
-            default=src_prefix,
-            help='Check out --editable packages into DIR (default %default)')
-
-        self.parser.add_option(
-            '-U', '--upgrade',
-            dest='upgrade',
-            action='store_true',
-            help='Upgrade all packages to the newest available version')
-        self.parser.add_option(
-            '--force-reinstall',
-            dest='force_reinstall',
-            action='store_true',
-            help='When upgrading, reinstall all packages even if they are '
-                 'already up-to-date.')
-        self.parser.add_option(
-            '-I', '--ignore-installed',
-            dest='ignore_installed',
-            action='store_true',
-            help='Ignore the installed packages (reinstalling instead)')
-        self.parser.add_option(
-            '--no-deps', '--no-dependencies',
-            dest='ignore_dependencies',
-            action='store_true',
-            default=False,
-            help='Ignore package dependencies')
-        self.parser.add_option(
-            '--no-install',
-            dest='no_install',
-            action='store_true',
-            help="Download and unpack all packages, but don't actually install them")
-        self.parser.add_option(
-            '--no-download',
-            dest='no_download',
-            action="store_true",
-            help="Don't download any packages, just install the ones already downloaded "
-            "(completes an install run with --no-install)")
-
-        self.parser.add_option(
-            '--install-option',
-            dest='install_options',
-            action='append',
-            help="Extra arguments to be supplied to the setup.py install "
-            "command (use like --install-option=\"--install-scripts=/usr/local/bin\").  "
-            "Use multiple --install-option options to pass multiple options to setup.py install.  "
-            "If you are using an option with a directory path, be sure to use absolute path.")
-
-        self.parser.add_option(
-            '--global-option',
-            dest='global_options',
-            action='append',
-            help="Extra global options to be supplied to the setup.py"
-            "call before the install command")
-
-        self.parser.add_option(
-            '--user',
-            dest='use_user_site',
-            action='store_true',
-            help='Install to user-site')
-
-    def _build_package_finder(self, options, index_urls):
-        """
-        Create a package finder appropriate to this install command.
-        This method is meant to be overridden by subclasses, not
-        called directly.
-        """
-        return PackageFinder(find_links=options.find_links,
-                             index_urls=index_urls,
-                             use_mirrors=options.use_mirrors,
-                             mirrors=options.mirrors)
-
-    def run(self, options, args):
-        if options.download_dir:
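-            # a download dir implies fetching everything fresh and skipping the install step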
-            options.no_install = True
-            options.ignore_installed = True
-        options.build_dir = os.path.abspath(options.build_dir)
-        options.src_dir = os.path.abspath(options.src_dir)
-        install_options = options.install_options or []
-        if options.use_user_site:
-            install_options.append('--user')
-        if options.target_dir:
-            options.ignore_installed = True
-            temp_target_dir = tempfile.mkdtemp()
-            options.target_dir = os.path.abspath(options.target_dir)
-            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
-                raise CommandError("Target path exists but is not a directory, will not continue.")
-            install_options.append('--home=' + temp_target_dir)
-        global_options = options.global_options or []
-        index_urls = [options.index_url] + options.extra_index_urls
-        if options.no_index:
-            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
-            index_urls = []
-
-        finder = self._build_package_finder(options, index_urls)
-
-        requirement_set = RequirementSet(
-            build_dir=options.build_dir,
-            src_dir=options.src_dir,
-            download_dir=options.download_dir,
-            download_cache=options.download_cache,
-            upgrade=options.upgrade,
-            ignore_installed=options.ignore_installed,
-            ignore_dependencies=options.ignore_dependencies,
-            force_reinstall=options.force_reinstall)
-        for name in args:
-            requirement_set.add_requirement(
-                InstallRequirement.from_line(name, None))
-        for name in options.editables:
-            requirement_set.add_requirement(
-                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
-        for filename in options.requirements:
-            for req in parse_requirements(filename, finder=finder, options=options):
-                requirement_set.add_requirement(req)
-        if not requirement_set.has_requirements:
-            opts = {'name': self.name}
-            if options.find_links:
-                msg = ('You must give at least one requirement to %(name)s '
-                       '(maybe you meant "pip %(name)s %(links)s"?)' %
-                       dict(opts, links=' '.join(options.find_links)))
-            else:
-                msg = ('You must give at least one requirement '
-                       'to %(name)s (see "pip help %(name)s")' % opts)
-            logger.warn(msg)
-            return
-
-        if (options.use_user_site and
-            sys.version_info < (2, 6)):
-            raise InstallationError('--user is only supported in Python version 2.6 and newer')
-
-        import setuptools
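-        # distribute is distinguished from plain setuptools by its _distribute marker attribute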
-        if (options.use_user_site and
-            requirement_set.has_editables and
-            not getattr(setuptools, '_distribute', False)):
-
-            raise InstallationError('--user --editable not supported with setuptools, use distribute')
-
-        if not options.no_download:
-            requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
-        else:
-            requirement_set.locate_files()
-
-        if not options.no_install and not self.bundle:
-            requirement_set.install(install_options, global_options)
-            installed = ' '.join([req.name for req in
-                                  requirement_set.successfully_installed])
-            if installed:
-                logger.notify('Successfully installed %s' % installed)
-        elif not self.bundle:
-            downloaded = ' '.join([req.name for req in
-                                   requirement_set.successfully_downloaded])
-            if downloaded:
-                logger.notify('Successfully downloaded %s' % downloaded)
-        elif self.bundle:
-            requirement_set.create_bundle(self.bundle_filename)
-            logger.notify('Created bundle in %s' % self.bundle_filename)
-        # Clean up
-        if not options.no_install or options.download_dir:
-            requirement_set.cleanup_files(bundle=self.bundle)
-        if options.target_dir:
-            if not os.path.exists(options.target_dir):
-                os.makedirs(options.target_dir)
-            lib_dir = os.path.join(temp_target_dir, "lib/python/")
-            for item in os.listdir(lib_dir):
-                shutil.move(
-                    os.path.join(lib_dir, item),
-                    os.path.join(options.target_dir, item)
-                    )
-            shutil.rmtree(temp_target_dir)
-        return requirement_set
-
-
-InstallCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/search.py b/vendor/pip-1.2.1/pip/commands/search.py
deleted file mode 100644
index 9f287e594f1beefa9ed575cb3392991ce0192fe4..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/search.py
+++ /dev/null
@@ -1,127 +0,0 @@
-import sys
-import textwrap
-import pkg_resources
-import pip.download
-from pip.basecommand import Command, SUCCESS
-from pip.util import get_terminal_size
-from pip.log import logger
-from pip.backwardcompat import xmlrpclib, reduce, cmp
-from pip.exceptions import CommandError
-from pip.status_codes import NO_MATCHES_FOUND
-from distutils.version import StrictVersion, LooseVersion
-
-
-class SearchCommand(Command):
-    name = 'search'
-    usage = '%prog QUERY'
-    summary = 'Search PyPI'
-
-    def __init__(self):
-        super(SearchCommand, self).__init__()
-        self.parser.add_option(
-            '--index',
-            dest='index',
-            metavar='URL',
-            default='http://pypi.python.org/pypi',
-            help='Base URL of Python Package Index (default %default)')
-
-    def run(self, options, args):
-        if not args:
-            raise CommandError('Missing required argument (search query).')
-        query = args
-        index_url = options.index
-
-        pypi_hits = self.search(query, index_url)
-        hits = transform_hits(pypi_hits)
-
-        terminal_width = None
-        if sys.stdout.isatty():
-            terminal_width = get_terminal_size()[0]
-
-        print_results(hits, terminal_width=terminal_width)
-        if pypi_hits:
-            return SUCCESS
-        return NO_MATCHES_FOUND
-
-    def search(self, query, index_url):
-        pypi = xmlrpclib.ServerProxy(index_url, pip.download.xmlrpclib_transport)
-        hits = pypi.search({'name': query, 'summary': query}, 'or')
-        return hits
-
-
-def transform_hits(hits):
-    """
-    The list from pypi is really a list of versions. We want a list of
-    packages with the list of versions stored inline. This converts the
-    list from pypi into one we can use.
-    """
-    packages = {}
-    for hit in hits:
-        name = hit['name']
-        summary = hit['summary']
-        version = hit['version']
-        score = hit['_pypi_ordering']
-
-        if name not in packages.keys():
-            packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score}
-        else:
-            packages[name]['versions'].append(version)
-
-            # if this is the highest version, replace summary and score
-            if version == highest_version(packages[name]['versions']):
-                packages[name]['summary'] = summary
-                packages[name]['score'] = score
-
-    # each record has a unique name now, so we will convert the dict into a list sorted by score
-    package_list = sorted(packages.values(), key=lambda x: x['score'], reverse=True)
-    return package_list
-
-
-def print_results(hits, name_column_width=25, terminal_width=None):
-    installed_packages = [p.project_name for p in pkg_resources.working_set]
-    for hit in hits:
-        name = hit['name']
-        summary = hit['summary'] or ''
-        if terminal_width is not None:
-            # wrap and indent summary to fit terminal
-            summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
-            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
-        line = '%s - %s' % (name.ljust(name_column_width), summary)
-        try:
-            logger.notify(line)
-            if name in installed_packages:
-                dist = pkg_resources.get_distribution(name)
-                logger.indent += 2
-                try:
-                    latest = highest_version(hit['versions'])
-                    if dist.version == latest:
-                        logger.notify('INSTALLED: %s (latest)' % dist.version)
-                    else:
-                        logger.notify('INSTALLED: %s' % dist.version)
-                        logger.notify('LATEST:    %s' % latest)
-                finally:
-                    logger.indent -= 2
-        except UnicodeEncodeError:
-            pass
-
-
-def compare_versions(version1, version2):
-    try:
-        return cmp(StrictVersion(version1), StrictVersion(version2))
-    # in case of abnormal version number, fall back to LooseVersion
-    except ValueError:
-        pass
-    try:
-        return cmp(LooseVersion(version1), LooseVersion(version2))
-    except TypeError:
-        # certain LooseVersion comparisons raise due to unorderable types;
-        # fall back to string comparison
-        return cmp([str(v) for v in LooseVersion(version1).version],
-                   [str(v) for v in LooseVersion(version2).version])
-
-
-def highest_version(versions):
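-    # the old "cond and a or b" idiom: keep whichever of each pair compares higher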
-    return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions)
-
-
-SearchCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/uninstall.py b/vendor/pip-1.2.1/pip/commands/uninstall.py
deleted file mode 100644
index 9f2b891218fcc7dac7507516ee41257af7a6fc0f..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/uninstall.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from pip.req import InstallRequirement, RequirementSet, parse_requirements
-from pip.basecommand import Command
-from pip.exceptions import InstallationError
-
-
-class UninstallCommand(Command):
-    name = 'uninstall'
-    usage = '%prog [OPTIONS] PACKAGE_NAMES ...'
-    summary = 'Uninstall packages'
-
-    def __init__(self):
-        super(UninstallCommand, self).__init__()
-        self.parser.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
-            default=[],
-            metavar='FILENAME',
-            help='Uninstall all the packages listed in the given requirements file.  '
-            'This option can be used multiple times.')
-        self.parser.add_option(
-            '-y', '--yes',
-            dest='yes',
-            action='store_true',
-            help="Don't ask for confirmation of uninstall deletions.")
-
-    def run(self, options, args):
-        requirement_set = RequirementSet(
-            build_dir=None,
-            src_dir=None,
-            download_dir=None)
-        for name in args:
-            requirement_set.add_requirement(
-                InstallRequirement.from_line(name))
-        for filename in options.requirements:
-            for req in parse_requirements(filename, options=options):
-                requirement_set.add_requirement(req)
-        if not requirement_set.has_requirements:
-            raise InstallationError('You must give at least one requirement '
-                'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
-        requirement_set.uninstall(auto_confirm=options.yes)
-
-UninstallCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/unzip.py b/vendor/pip-1.2.1/pip/commands/unzip.py
deleted file mode 100644
index f83e182059e35bb7516a7e080d4a0239f6eafaaf..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/unzip.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from pip.commands.zip import ZipCommand
-
-
-class UnzipCommand(ZipCommand):
-    name = 'unzip'
-    summary = 'Unzip individual packages'
-
-
-UnzipCommand()
diff --git a/vendor/pip-1.2.1/pip/commands/zip.py b/vendor/pip-1.2.1/pip/commands/zip.py
deleted file mode 100644
index ebe1d791a4d87c11324a1c76dc59495f1beded4e..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/commands/zip.py
+++ /dev/null
@@ -1,346 +0,0 @@
-import sys
-import re
-import fnmatch
-import os
-import shutil
-import zipfile
-from pip.util import display_path, backup_dir, rmtree
-from pip.log import logger
-from pip.exceptions import InstallationError
-from pip.basecommand import Command
-
-
-class ZipCommand(Command):
-    name = 'zip'
-    usage = '%prog [OPTIONS] PACKAGE_NAMES...'
-    summary = 'Zip individual packages'
-
-    def __init__(self):
-        super(ZipCommand, self).__init__()
-        if self.name == 'zip':
-            self.parser.add_option(
-                '--unzip',
-                action='store_true',
-                dest='unzip',
-                help='Unzip (rather than zip) a package')
-        else:
-            self.parser.add_option(
-                '--zip',
-                action='store_false',
-                dest='unzip',
-                default=True,
-                help='Zip (rather than unzip) a package')
-        self.parser.add_option(
-            '--no-pyc',
-            action='store_true',
-            dest='no_pyc',
-            help='Do not include .pyc files in zip files (useful on Google App Engine)')
-        self.parser.add_option(
-            '-l', '--list',
-            action='store_true',
-            dest='list',
-            help='List the packages available, and their zip status')
-        self.parser.add_option(
-            '--sort-files',
-            action='store_true',
-            dest='sort_files',
-            help='With --list, sort packages according to how many files they contain')
-        self.parser.add_option(
-            '--path',
-            action='append',
-            dest='paths',
-            help='Restrict operations to the given paths (may include wildcards)')
-        self.parser.add_option(
-            '-n', '--simulate',
-            action='store_true',
-            help='Do not actually perform the zip/unzip operation')
-
-    def paths(self):
-        """All the entries of sys.path, possibly restricted by --path"""
-        if not self.select_paths:
-            return sys.path
-        result = []
-        match_any = set()
-        for path in sys.path:
-            path = os.path.normcase(os.path.abspath(path))
-            for match in self.select_paths:
-                match = os.path.normcase(os.path.abspath(match))
-                if '*' in match:
-                    if re.search(fnmatch.translate(match+'*'), path):
-                        result.append(path)
-                        match_any.add(match)
-                        break
-                else:
-                    if path.startswith(match):
-                        result.append(path)
-                        match_any.add(match)
-                        break
-            else:
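-                # for/else: reached only when no --path pattern matched this entry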
-                logger.debug("Skipping path %s because it doesn't match %s"
-                             % (path, ', '.join(self.select_paths)))
-        for match in self.select_paths:
-            if match not in match_any and '*' not in match:
-                result.append(match)
-                logger.debug("Adding path %s because it doesn't match anything already on sys.path"
-                             % match)
-        return result
-
-    def run(self, options, args):
-        self.select_paths = options.paths
-        self.simulate = options.simulate
-        if options.list:
-            return self.list(options, args)
-        if not args:
-            raise InstallationError(
-                'You must give at least one package to zip or unzip')
-        packages = []
-        for arg in args:
-            module_name, filename = self.find_package(arg)
-            if options.unzip and os.path.isdir(filename):
-                raise InstallationError(
-                    'The module %s (in %s) is not a zip file; cannot be unzipped'
-                    % (module_name, filename))
-            elif not options.unzip and not os.path.isdir(filename):
-                raise InstallationError(
-                    'The module %s (in %s) is not a directory; cannot be zipped'
-                    % (module_name, filename))
-            packages.append((module_name, filename))
-        last_status = None
-        for module_name, filename in packages:
-            if options.unzip:
-                last_status = self.unzip_package(module_name, filename)
-            else:
-                last_status = self.zip_package(module_name, filename, options.no_pyc)
-        return last_status
-
-    def unzip_package(self, module_name, filename):
-        zip_filename = os.path.dirname(filename)
-        if not (os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename)):
-            raise InstallationError(
-                'Module %s (in %s) isn\'t located in a zip file in %s'
-                % (module_name, filename, zip_filename))
-        package_path = os.path.dirname(zip_filename)
-        if package_path not in self.paths():
-            logger.warn(
-                'Unpacking %s into %s, but %s is not on sys.path'
-                % (display_path(zip_filename), display_path(package_path),
-                   display_path(package_path)))
-        logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
-        if self.simulate:
-            logger.notify('Skipping remaining operations because of --simulate')
-            return
-        logger.indent += 2
-        try:
-            ## FIXME: this should be undoable:
-            zip = zipfile.ZipFile(zip_filename)
-            to_save = []
-            for name in zip.namelist():
-                if name.startswith(module_name + os.path.sep):
-                    content = zip.read(name)
-                    dest = os.path.join(package_path, name)
-                    if not os.path.exists(os.path.dirname(dest)):
-                        os.makedirs(os.path.dirname(dest))
-                    if not content and dest.endswith(os.path.sep):
-                        if not os.path.exists(dest):
-                            os.makedirs(dest)
-                    else:
-                        f = open(dest, 'wb')
-                        f.write(content)
-                        f.close()
-                else:
-                    to_save.append((name, zip.read(name)))
-            zip.close()
-            if not to_save:
-                logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
-                os.unlink(zip_filename)
-                self.remove_filename_from_pth(zip_filename)
-            else:
-                logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
-                zip = zipfile.ZipFile(zip_filename, 'w')
-                for name, content in to_save:
-                    zip.writestr(name, content)
-                zip.close()
-        finally:
-            logger.indent -= 2
-
-    def zip_package(self, module_name, filename, no_pyc):
-        orig_filename = filename
-        logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
-        logger.indent += 2
-        if filename.endswith('.egg'):
-            dest_filename = filename
-        else:
-            dest_filename = filename + '.zip'
-        try:
-            ## FIXME: I think this needs to be undoable:
-            if filename == dest_filename:
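-                # the original directory is moved aside so the zip can be written at that exact path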
-                filename = backup_dir(orig_filename)
-                logger.notify('Moving %s aside to %s' % (orig_filename, filename))
-                if not self.simulate:
-                    shutil.move(orig_filename, filename)
-            try:
-                logger.info('Creating zip file in %s' % display_path(dest_filename))
-                if not self.simulate:
-                    zip = zipfile.ZipFile(dest_filename, 'w')
-                    zip.writestr(module_name + '/', '')
-                    for dirpath, dirnames, filenames in os.walk(filename):
-                        if no_pyc:
-                            filenames = [f for f in filenames
-                                         if not f.lower().endswith('.pyc')]
-                        for fns, is_dir in [(dirnames, True), (filenames, False)]:
-                            for fn in fns:
-                                full = os.path.join(dirpath, fn)
-                                dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
-                                if is_dir:
-                                    zip.writestr(dest+'/', '')
-                                else:
-                                    zip.write(full, dest)
-                    zip.close()
-                logger.info('Removing old directory %s' % display_path(filename))
-                if not self.simulate:
-                    rmtree(filename)
-            except:
-                ## FIXME: need to do an undo here
-                raise
-            ## FIXME: should also be undone:
-            self.add_filename_to_pth(dest_filename)
-        finally:
-            logger.indent -= 2
-
-    def remove_filename_from_pth(self, filename):
-        for pth in self.pth_files():
-            f = open(pth, 'r')
-            lines = f.readlines()
-            f.close()
-            new_lines = [
-                l for l in lines if l.strip() != filename]
-            if lines != new_lines:
-                logger.info('Removing reference to %s from .pth file %s'
-                            % (display_path(filename), display_path(pth)))
-                if not [line for line in new_lines if line]:
-                    logger.info('%s file would be empty: deleting' % display_path(pth))
-                    if not self.simulate:
-                        os.unlink(pth)
-                else:
-                    if not self.simulate:
-                        f = open(pth, 'wb')
-                        f.writelines(new_lines)
-                        f.close()
-                return
-        logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
-
-    def add_filename_to_pth(self, filename):
-        path = os.path.dirname(filename)
-        dest = os.path.join(path, filename + '.pth')
-        if path not in self.paths():
-            logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
-        if not self.simulate:
-            if os.path.exists(dest):
-                f = open(dest)
-                lines = f.readlines()
-                f.close()
-                if lines and not lines[-1].endswith('\n'):
-                    lines[-1] += '\n'
-                lines.append(filename+'\n')
-            else:
-                lines = [filename + '\n']
-            f = open(dest, 'wb')
-            f.writelines(lines)
-            f.close()
-
-    def pth_files(self):
-        for path in self.paths():
-            if not os.path.exists(path) or not os.path.isdir(path):
-                continue
-            for filename in os.listdir(path):
-                if filename.endswith('.pth'):
-                    yield os.path.join(path, filename)
-
-    def find_package(self, package):
-        for path in self.paths():
-            full = os.path.join(path, package)
-            if os.path.exists(full):
-                return package, full
-            if not os.path.isdir(path) and zipfile.is_zipfile(path):
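-                # the sys.path entry is itself a zip (e.g. a zipped egg); probe it
-                # for the package's __init__.py to see whether the package lives inside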
-                zip = zipfile.ZipFile(path, 'r')
-                try:
-                    zip.read(os.path.join(package, '__init__.py'))
-                except KeyError:
-                    pass
-                else:
-                    zip.close()
-                    return package, full
-                zip.close()
-        ## FIXME: need special error for package.py case:
-        raise InstallationError(
-            'No package with the name %s found' % package)
-
-    def list(self, options, args):
-        if args:
-            raise InstallationError(
-                'You cannot give an argument with --list')
-        for path in sorted(self.paths()):
-            if not os.path.exists(path):
-                continue
-            basename = os.path.basename(path.rstrip(os.path.sep))
-            if os.path.isfile(path) and zipfile.is_zipfile(path):
-                if os.path.dirname(path) not in self.paths():
-                    logger.notify('Zipped egg: %s' % display_path(path))
-                continue
-            if (basename != 'site-packages' and basename != 'dist-packages'
-                and not path.replace('\\', '/').endswith('lib/python')):
-                continue
-            logger.notify('In %s:' % display_path(path))
-            logger.indent += 2
-            zipped = []
-            unzipped = []
-            try:
-                for filename in sorted(os.listdir(path)):
-                    ext = os.path.splitext(filename)[1].lower()
-                    if ext in ('.pth', '.egg-info', '.egg-link'):
-                        continue
-                    if ext == '.py':
-                        logger.info('Not displaying %s: not a package' % display_path(filename))
-                        continue
-                    full = os.path.join(path, filename)
-                    if os.path.isdir(full):
-                        unzipped.append((filename, self.count_package(full)))
-                    elif zipfile.is_zipfile(full):
-                        zipped.append(filename)
-                    else:
-                        logger.info('Unknown file: %s' % display_path(filename))
-                if zipped:
-                    logger.notify('Zipped packages:')
-                    logger.indent += 2
-                    try:
-                        for filename in zipped:
-                            logger.notify(filename)
-                    finally:
-                        logger.indent -= 2
-                else:
-                    logger.notify('No zipped packages.')
-                if unzipped:
-                    if options.sort_files:
-                        unzipped.sort(key=lambda x: -x[1])
-                    logger.notify('Unzipped packages:')
-                    logger.indent += 2
-                    try:
-                        for filename, count in unzipped:
-                            logger.notify('%s  (%i files)' % (filename, count))
-                    finally:
-                        logger.indent -= 2
-                else:
-                    logger.notify('No unzipped packages.')
-            finally:
-                logger.indent -= 2
-
-    def count_package(self, path):
-        total = 0
-        for dirpath, dirnames, filenames in os.walk(path):
-            filenames = [f for f in filenames
-                         if not f.lower().endswith('.pyc')]
-            total += len(filenames)
-        return total
-
-
-ZipCommand()
diff --git a/vendor/pip-1.2.1/pip/download.py b/vendor/pip-1.2.1/pip/download.py
deleted file mode 100644
index a31e5d6709fc09a1f0153775a280268130adea6d..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/download.py
+++ /dev/null
@@ -1,481 +0,0 @@
-import cgi
-import getpass
-import mimetypes
-import os
-import re
-import shutil
-import sys
-import tempfile
-from pip.backwardcompat import (md5, copytree, xmlrpclib, urllib, urllib2,
-                                urlparse, string_types, HTTPError)
-from pip.exceptions import InstallationError
-from pip.util import (splitext, rmtree, format_size, display_path,
-                      backup_dir, ask, ask_path_exists, unpack_file,
-                      create_download_cache_folder, cache_download)
-from pip.vcs import vcs
-from pip.log import logger
-
-
-__all__ = ['xmlrpclib_transport', 'get_file_content', 'urlopen',
-           'is_url', 'url_to_path', 'path_to_url', 'path_to_url2',
-           'geturl', 'is_archive_file', 'unpack_vcs_link',
-           'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
-
-
-xmlrpclib_transport = xmlrpclib.Transport()
-
-
-def get_file_content(url, comes_from=None):
-    """Gets the content of a file; it may be a filename, file: URL, or
-    http: URL.  Returns (location, content)"""
-    match = _scheme_re.search(url)
-    if match:
-        scheme = match.group(1).lower()
-        if (scheme == 'file' and comes_from
-            and comes_from.startswith('http')):
-            raise InstallationError(
-                'Requirements file %s references URL %s, which is local'
-                % (comes_from, url))
-        if scheme == 'file':
-            path = url.split(':', 1)[1]
-            path = path.replace('\\', '/')
-            match = _url_slash_drive_re.match(path)
-            if match:
-                path = match.group(1) + ':' + path.split('|', 1)[1]
-            path = urllib.unquote(path)
-            if path.startswith('/'):
-                path = '/' + path.lstrip('/')
-            url = path
-        else:
-            ## FIXME: catch some errors
-            resp = urlopen(url)
-            return geturl(resp), resp.read()
-    try:
-        f = open(url)
-        content = f.read()
-    except IOError:
-        e = sys.exc_info()[1]
-        raise InstallationError('Could not open requirements file: %s' % str(e))
-    else:
-        f.close()
-    return url, content
-
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
-
-
-class URLOpener(object):
-    """
-    pip's own URL helper that adds HTTP auth and proxy support
-    """
-    def __init__(self):
-        self.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
-
-    def __call__(self, url):
-        """
-        If the given url contains auth info or if a normal request gets a 401
-        response, an attempt is made to fetch the resource using basic HTTP
-        auth.
-
-        """
-        url, username, password = self.extract_credentials(url)
-        if username is None:
-            try:
-                response = urllib2.urlopen(self.get_request(url))
-            except urllib2.HTTPError:
-                e = sys.exc_info()[1]
-                if e.code != 401:
-                    raise
-                response = self.get_response(url)
-        else:
-            response = self.get_response(url, username, password)
-        return response
-
-    def get_request(self, url):
-        """
-        Wraps the URL to retrieve to protect against "creative"
-        interpretation of the RFC: http://bugs.python.org/issue8732
-        """
-        if isinstance(url, string_types):
-            url = urllib2.Request(url, headers={'Accept-encoding': 'identity'})
-        return url
-
-    def get_response(self, url, username=None, password=None):
-        """
-        Does the dirty work of actually getting the response object using urllib2
-        and its HTTP auth builtins.
-        """
-        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
-        req = self.get_request(url)
-
-        stored_username, stored_password = self.passman.find_user_password(None, netloc)
-        # see if we have a password stored
-        if stored_username is None:
-            if username is None and self.prompting:
-                username = urllib.quote(raw_input('User for %s: ' % netloc))
-                password = urllib.quote(getpass.getpass('Password: '))
-            if username and password:
-                self.passman.add_password(None, netloc, username, password)
-            stored_username, stored_password = self.passman.find_user_password(None, netloc)
-        authhandler = urllib2.HTTPBasicAuthHandler(self.passman)
-        opener = urllib2.build_opener(authhandler)
-        # FIXME: should catch a 401 and offer to let the user reenter credentials
-        return opener.open(req)
-
-    def setup(self, proxystr='', prompting=True):
-        """
-        Sets the proxy handler given the option passed on the command
-        line.  If an empty string is passed it looks at the HTTP_PROXY
-        environment variable.
-        """
-        self.prompting = prompting
-        proxy = self.get_proxy(proxystr)
-        if proxy:
-            proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy, "https": proxy})
-            opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
-            urllib2.install_opener(opener)
-
-    def parse_credentials(self, netloc):
-        if "@" in netloc:
-            userinfo = netloc.rsplit("@", 1)[0]
-            if ":" in userinfo:
-                return userinfo.split(":", 1)
-            return userinfo, None
-        return None, None
-
-    def extract_credentials(self, url):
-        """
-        Extracts user/password from a url.
-
-        Returns a tuple:
-            (url-without-auth, username, password)
-        """
-        if isinstance(url, urllib2.Request):
-            result = urlparse.urlsplit(url.get_full_url())
-        else:
-            result = urlparse.urlsplit(url)
-        scheme, netloc, path, query, frag = result
-
-        username, password = self.parse_credentials(netloc)
-        if username is None:
-            return url, None, None
-        elif password is None and self.prompting:
-            # remove the auth credentials from the url part
-            netloc = netloc.replace('%s@' % username, '', 1)
-            # prompt for the password
-            prompt = 'Password for %s@%s: ' % (username, netloc)
-            password = urllib.quote(getpass.getpass(prompt))
-        else:
-            # remove the auth credentials from the url part
-            netloc = netloc.replace('%s:%s@' % (username, password), '', 1)
-
-        target_url = urlparse.urlunsplit((scheme, netloc, path, query, frag))
-        return target_url, username, password
-
-    def get_proxy(self, proxystr=''):
-        """
-        Get the proxy given the option passed on the command line.
-        If an empty string is passed it looks at the HTTP_PROXY
-        environment variable.
-        """
-        if not proxystr:
-            proxystr = os.environ.get('HTTP_PROXY', '')
-        if proxystr:
-            if '@' in proxystr:
-                user_password, server_port = proxystr.split('@', 1)
-                if ':' in user_password:
-                    user, password = user_password.split(':', 1)
-                else:
-                    user = user_password
-                    prompt = 'Password for %s@%s: ' % (user, server_port)
-                    password = urllib.quote(getpass.getpass(prompt))
-                return '%s:%s@%s' % (user, password, server_port)
-            else:
-                return proxystr
-        else:
-            return None
-
-urlopen = URLOpener()
-
-
-def is_url(name):
-    """Returns true if the name looks like a URL"""
-    if ':' not in name:
-        return False
-    scheme = name.split(':', 1)[0].lower()
-    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
-
-
-def url_to_path(url):
-    """
-    Convert a file: URL to a path.
-    """
-    assert url.startswith('file:'), (
-        "You can only turn file: urls into filenames (not %r)" % url)
-    path = url[len('file:'):].lstrip('/')
-    path = urllib.unquote(path)
-    if _url_drive_re.match(path):
-        path = path[0] + ':' + path[2:]
-    else:
-        path = '/' + path
-    return path
-
-
-_drive_re = re.compile('^([a-z]):', re.I)
-_url_drive_re = re.compile('^([a-z])[:|]', re.I)
-
-
-def path_to_url(path):
-    """
-    Convert a path to a file: URL.  The path will be made absolute.
-    """
-    path = os.path.normcase(os.path.abspath(path))
-    if _drive_re.match(path):
-        path = path[0] + '|' + path[2:]
-    url = urllib.quote(path)
-    url = url.replace(os.path.sep, '/')
-    url = url.lstrip('/')
-    return 'file:///' + url
-
-
-def path_to_url2(path):
-    """
-    Convert a path to a file: URL.  The path will be made absolute and have
-    quoted path parts.
-    """
-    path = os.path.normpath(os.path.abspath(path))
-    drive, path = os.path.splitdrive(path)
-    filepath = path.split(os.path.sep)
-    url = '/'.join([urllib.quote(part) for part in filepath])
-    if not drive:
-        url = url.lstrip('/')
-    return 'file:///' + drive + url
-
-
-def geturl(urllib2_resp):
-    """
-    Use instead of urllib.addinfourl.geturl(), which appears to have
-    some issues with dropping the double slash for certain schemes
-    (e.g. file://).  This implementation is probably over-eager, as it
-    always restores '://' if it is missing, and it appears some url
-    schemata aren't always followed by '//' after the colon, but as
-    far as I know pip doesn't need any of those.
-    The URI RFC can be found at: http://tools.ietf.org/html/rfc1630
-
-    This function assumes that
-        scheme:/foo/bar
-    is the same as
-        scheme:///foo/bar
-    """
-    url = urllib2_resp.geturl()
-    scheme, rest = url.split(':', 1)
-    if rest.startswith('//'):
-        return url
-    else:
-        # FIXME: write a good test to cover it
-        return '%s://%s' % (scheme, rest)
-
-
-def is_archive_file(name):
-    """Return True if `name` is a considered as an archive file."""
-    archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
-    ext = splitext(name)[1].lower()
-    if ext in archives:
-        return True
-    return False
-
-
-def unpack_vcs_link(link, location, only_download=False):
-    vcs_backend = _get_used_vcs_backend(link)
-    if only_download:
-        vcs_backend.export(location)
-    else:
-        vcs_backend.unpack(location)
-
-
-def unpack_file_url(link, location):
-    source = url_to_path(link.url)
-    content_type = mimetypes.guess_type(source)[0]
-    if os.path.isdir(source):
-        # delete the location since shutil will create it again :(
-        if os.path.isdir(location):
-            rmtree(location)
-        copytree(source, location)
-    else:
-        unpack_file(source, location, content_type, link)
-
-
-def _get_used_vcs_backend(link):
-    for backend in vcs.backends:
-        if link.scheme in backend.schemes:
-            vcs_backend = backend(link.url)
-            return vcs_backend
-
-
-def is_vcs_url(link):
-    return bool(_get_used_vcs_backend(link))
-
-
-def is_file_url(link):
-    return link.url.lower().startswith('file:')
-
-
-def _check_md5(download_hash, link):
-    download_hash = download_hash.hexdigest()
-    if download_hash != link.md5_hash:
-        logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
-                     % (link, download_hash, link.md5_hash))
-        raise InstallationError('Bad MD5 hash for package %s' % link)
-
-
-def _get_md5_from_file(target_file, link):
-    download_hash = md5()
-    fp = open(target_file, 'rb')
-    while True:
-        chunk = fp.read(4096)
-        if not chunk:
-            break
-        download_hash.update(chunk)
-    fp.close()
-    return download_hash
-
-
-def _download_url(resp, link, temp_location):
-    fp = open(temp_location, 'wb')
-    download_hash = None
-    if link.md5_hash:
-        download_hash = md5()
-    try:
-        total_length = int(resp.info()['content-length'])
-    except (ValueError, KeyError, TypeError):
-        total_length = 0
-    downloaded = 0
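-    # only show a progress bar for downloads over ~40 kB or of unknown length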
-    show_progress = total_length > 40*1000 or not total_length
-    show_url = link.show_url
-    try:
-        if show_progress:
-            ## FIXME: the URL can get really long in this message:
-            if total_length:
-                logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
-            else:
-                logger.start_progress('Downloading %s (unknown size): ' % show_url)
-        else:
-            logger.notify('Downloading %s' % show_url)
-        logger.debug('Downloading from URL %s' % link)
-
-        while True:
-            chunk = resp.read(4096)
-            if not chunk:
-                break
-            downloaded += len(chunk)
-            if show_progress:
-                if not total_length:
-                    logger.show_progress('%s' % format_size(downloaded))
-                else:
-                    logger.show_progress('%3i%%  %s' % (100*downloaded/total_length, format_size(downloaded)))
-            if link.md5_hash:
-                download_hash.update(chunk)
-            fp.write(chunk)
-        fp.close()
-    finally:
-        if show_progress:
-            logger.end_progress('%s downloaded' % format_size(downloaded))
-    return download_hash
-
-
-def _copy_file(filename, location, content_type, link):
-    copy = True
-    download_location = os.path.join(location, link.filename)
-    if os.path.exists(download_location):
-        response = ask_path_exists(
-            'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
-            display_path(download_location), ('i', 'w', 'b'))
-        if response == 'i':
-            copy = False
-        elif response == 'w':
-            logger.warn('Deleting %s' % display_path(download_location))
-            os.remove(download_location)
-        elif response == 'b':
-            dest_file = backup_dir(download_location)
-            logger.warn('Backing up %s to %s'
-                        % (display_path(download_location), display_path(dest_file)))
-            shutil.move(download_location, dest_file)
-    if copy:
-        shutil.copy(filename, download_location)
-        logger.indent -= 2
-        logger.notify('Saved %s' % display_path(download_location))
-
-
-def unpack_http_url(link, location, download_cache, download_dir=None):
-    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
-    target_url = link.url.split('#', 1)[0]
-    target_file = None
-    download_hash = None
-    if download_cache:
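-        # the cache key is the full URL, percent-quoted (including '/') so it is
-        # a single safe filename inside the cache directory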
-        target_file = os.path.join(download_cache,
-                                   urllib.quote(target_url, ''))
-        if not os.path.isdir(download_cache):
-            create_download_cache_folder(download_cache)
-    if (target_file
-        and os.path.exists(target_file)
-        and os.path.exists(target_file + '.content-type')):
-        fp = open(target_file+'.content-type')
-        content_type = fp.read().strip()
-        fp.close()
-        if link.md5_hash:
-            download_hash = _get_md5_from_file(target_file, link)
-        temp_location = target_file
-        logger.notify('Using download cache from %s' % target_file)
-    else:
-        resp = _get_response_from_url(target_url, link)
-        content_type = resp.info()['content-type']
-        filename = link.filename  # fallback
-        # Have a look at the Content-Disposition header for a better guess
-        content_disposition = resp.info().get('content-disposition')
-        if content_disposition:
-            type, params = cgi.parse_header(content_disposition)
-            # We use ``or`` here because we don't want to use an "empty" value
-            # from the filename param.
-            filename = params.get('filename') or filename
-        ext = splitext(filename)[1]
-        if not ext:
-            ext = mimetypes.guess_extension(content_type)
-            if ext:
-                filename += ext
-        if not ext and link.url != geturl(resp):
-            ext = os.path.splitext(geturl(resp))[1]
-            if ext:
-                filename += ext
-        temp_location = os.path.join(temp_dir, filename)
-        download_hash = _download_url(resp, link, temp_location)
-    if link.md5_hash:
-        _check_md5(download_hash, link)
-    if download_dir:
-        _copy_file(temp_location, download_dir, content_type, link)
-    unpack_file(temp_location, location, content_type, link)
-    if target_file and target_file != temp_location:
-        cache_download(target_file, temp_location, content_type)
-    if target_file is None:
-        os.unlink(temp_location)
-    os.rmdir(temp_dir)
-
-
-def _get_response_from_url(target_url, link):
-    try:
-        resp = urlopen(target_url)
-    except urllib2.HTTPError:
-        e = sys.exc_info()[1]
-        logger.fatal("HTTP error %s while getting %s" % (e.code, link))
-        raise
-    except IOError:
-        e = sys.exc_info()[1]
-        # Typically an FTP error
-        logger.fatal("Error %s while getting %s" % (e, link))
-        raise
-    return resp
-
-
-class Urllib2HeadRequest(urllib2.Request):
-    def get_method(self):
-        return "HEAD"
diff --git a/vendor/pip-1.2.1/pip/exceptions.py b/vendor/pip-1.2.1/pip/exceptions.py
deleted file mode 100644
index 22f554a76ea1bbf837485ecabf85db3d6261e0d8..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/exceptions.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""Exceptions used throughout package"""
-
-
-class InstallationError(Exception):
-    """General exception during installation"""
-
-
-class UninstallationError(Exception):
-    """General exception during uninstallation"""
-
-
-class DistributionNotFound(InstallationError):
-    """Raised when a distribution cannot be found to satisfy a requirement"""
-
-
-class BestVersionAlreadyInstalled(Exception):
-    """Raised when the most up-to-date version of a package is already
-    installed.
-    """
-
-
-class BadCommand(Exception):
-    """Raised when virtualenv or a command is not found"""
-
-
-class CommandError(Exception):
-    """Raised when there is an error in command-line arguments"""
diff --git a/vendor/pip-1.2.1/pip/index.py b/vendor/pip-1.2.1/pip/index.py
deleted file mode 100644
index 8e53e44b7dcc3ce578f79606ced3f27bb9b0192b..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/index.py
+++ /dev/null
@@ -1,708 +0,0 @@
-"""Routines related to PyPI, indexes"""
-
-import sys
-import os
-import re
-import gzip
-import mimetypes
-try:
-    import threading
-except ImportError:
-    import dummy_threading as threading
-import posixpath
-import pkg_resources
-import random
-import socket
-import string
-import zlib
-from pip.log import logger
-from pip.util import Inf
-from pip.util import normalize_name, splitext
-from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
-from pip.backwardcompat import (WindowsError, BytesIO,
-                                Queue, httplib, urlparse,
-                                URLError, HTTPError, u,
-                                product, url2pathname)
-from pip.backwardcompat import Empty as QueueEmpty
-from pip.download import urlopen, path_to_url2, url_to_path, geturl, Urllib2HeadRequest
-
-__all__ = ['PackageFinder']
-
-
-DEFAULT_MIRROR_URL = "last.pypi.python.org"
-
-
-class PackageFinder(object):
-    """This finds packages.
-
-    This is meant to match easy_install's technique for looking for
-    packages, by reading pages and looking for appropriate links
-    """
-
-    def __init__(self, find_links, index_urls,
-            use_mirrors=False, mirrors=None, main_mirror_url=None):
-        self.find_links = find_links
-        self.index_urls = index_urls
-        self.dependency_links = []
-        self.cache = PageCache()
-        # These are boring links that have already been logged somehow:
-        self.logged_links = set()
-        if use_mirrors:
-            self.mirror_urls = self._get_mirror_urls(mirrors, main_mirror_url)
-            logger.info('Using PyPI mirrors: %s' % ', '.join(self.mirror_urls))
-        else:
-            self.mirror_urls = []
-
-    def add_dependency_links(self, links):
-        ## FIXME: this shouldn't be a global list; it should only
-        ## apply to requirements of the package that specifies the
-        ## dependency_links value
-        ## FIXME: also, we should track comes_from (i.e., use Link)
-        self.dependency_links.extend(links)
-
-    @staticmethod
-    def _sort_locations(locations):
-        """
-        Sort locations into "files" (archives) and "urls", and return
-        a pair of lists (files,urls)
-        """
-        files = []
-        urls = []
-
-        # puts the url for the given file path into the appropriate
-        # list
-        def sort_path(path):
-            url = path_to_url2(path)
-            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
-                urls.append(url)
-            else:
-                files.append(url)
-
-        for url in locations:
-            if url.startswith('file:'):
-                path = url_to_path(url)
-                if os.path.isdir(path):
-                    path = os.path.realpath(path)
-                    for item in os.listdir(path):
-                        sort_path(os.path.join(path, item))
-                elif os.path.isfile(path):
-                    sort_path(path)
-            else:
-                urls.append(url)
-        return files, urls
-
-    def find_requirement(self, req, upgrade):
-        url_name = req.url_name
-        # Only check main index if index URL is given:
-        main_index_url = None
-        if self.index_urls:
-            # Check that we have the url_name correctly spelled:
-            main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
-            # This will also cache the page, so it's okay that we get it again later:
-            page = self._get_page(main_index_url, req)
-            if page is None:
-                url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
-
-        # Combine index URLs with mirror URLs here to allow
-        # adding more index URLs from requirements files
-        all_index_urls = self.index_urls + self.mirror_urls
-
-        def mkurl_pypi_url(url):
-            loc = posixpath.join(url, url_name)
-            # For maximum compatibility with easy_install, ensure the path
-            # ends in a trailing slash.  Although this isn't in the spec
-            # (and PyPI can handle it without the slash) some other index
-            # implementations might break if they relied on easy_install's behavior.
-            if not loc.endswith('/'):
-                loc = loc + '/'
-            return loc
-        if url_name is not None:
-            locations = [
-                mkurl_pypi_url(url)
-                for url in all_index_urls] + self.find_links
-        else:
-            locations = list(self.find_links)
-        locations.extend(self.dependency_links)
-        for version in req.absolute_versions:
-            if url_name is not None and main_index_url is not None:
-                locations = [
-                    posixpath.join(main_index_url.url, version)] + locations
-
-        file_locations, url_locations = self._sort_locations(locations)
-
-        locations = [Link(url) for url in url_locations]
-        logger.debug('URLs to search for versions for %s:' % req)
-        for location in locations:
-            logger.debug('* %s' % location)
-        found_versions = []
-        found_versions.extend(
-            self._package_versions(
-                [Link(url, '-f') for url in self.find_links], req.name.lower()))
-        page_versions = []
-        for page in self._get_pages(locations, req):
-            logger.debug('Analyzing links from page %s' % page.url)
-            logger.indent += 2
-            try:
-                page_versions.extend(self._package_versions(page.links, req.name.lower()))
-            finally:
-                logger.indent -= 2
-        dependency_versions = list(self._package_versions(
-            [Link(url) for url in self.dependency_links], req.name.lower()))
-        if dependency_versions:
-            logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
-        file_versions = list(self._package_versions(
-                [Link(url) for url in file_locations], req.name.lower()))
-        if not found_versions and not page_versions and not dependency_versions and not file_versions:
-            logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
-            raise DistributionNotFound('No distributions at all found for %s' % req)
-        if req.satisfied_by is not None:
-            found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
-        if file_versions:
-            file_versions.sort(reverse=True)
-            logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
-            found_versions = file_versions + found_versions
-        all_versions = found_versions + page_versions + dependency_versions
-        applicable_versions = []
-        for (parsed_version, link, version) in all_versions:
-            if version not in req.req:
-                logger.info("Ignoring link %s, version %s doesn't match %s"
-                            % (link, version, ','.join([''.join(s) for s in req.req.specs])))
-                continue
-            applicable_versions.append((link, version))
-        applicable_versions = sorted(applicable_versions, key=lambda v: pkg_resources.parse_version(v[1]), reverse=True)
-        existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
-        if not upgrade and existing_applicable:
-            if applicable_versions[0][0] is Inf:
-                logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
-                            % req.satisfied_by.version)
-                raise BestVersionAlreadyInstalled
-            else:
-                logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
-                            % (req.satisfied_by.version, applicable_versions[0][1]))
-            return None
-        if not applicable_versions:
-            logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
-                         % (req, ', '.join([version for parsed_version, link, version in found_versions])))
-            raise DistributionNotFound('No distributions matching the version for %s' % req)
-        if applicable_versions[0][0] is Inf:
-            # We have an existing version, and it's the best version
-            logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
-                        % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
-            raise BestVersionAlreadyInstalled
-        if len(applicable_versions) > 1:
-            logger.info('Using version %s (newest of versions: %s)' %
-                        (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
-        return applicable_versions[0][0]
-
-    def _find_url_name(self, index_url, url_name, req):
-        """Finds the true URL name of a package, when the given name isn't quite correct.
-        This is usually used to implement case-insensitivity."""
-        if not index_url.url.endswith('/'):
-            # Vaguely part of the PyPI API... weird but true.
-            ## FIXME: bad to modify this?
-            index_url.url += '/'
-        page = self._get_page(index_url, req)
-        if page is None:
-            logger.fatal('Cannot fetch index base URL %s' % index_url)
-            return
-        norm_name = normalize_name(req.url_name)
-        for link in page.links:
-            base = posixpath.basename(link.path.rstrip('/'))
-            if norm_name == normalize_name(base):
-                logger.notify('Real name of requirement %s is %s' % (url_name, base))
-                return base
-        return None
-
-    def _get_pages(self, locations, req):
-        """Yields (page, page_url) from the given locations, skipping
-        locations that have errors, and adding download/homepage links"""
-        pending_queue = Queue()
-        for location in locations:
-            pending_queue.put(location)
-        done = []
-        seen = set()
-        threads = []
-        for i in range(min(10, len(locations))):
-            t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
-            t.setDaemon(True)
-            threads.append(t)
-            t.start()
-        for t in threads:
-            t.join()
-        return done
-
-    _log_lock = threading.Lock()
-
-    def _get_queued_page(self, req, pending_queue, done, seen):
-        while 1:
-            try:
-                location = pending_queue.get(False)
-            except QueueEmpty:
-                return
-            if location in seen:
-                continue
-            seen.add(location)
-            page = self._get_page(location, req)
-            if page is None:
-                continue
-            done.append(page)
-            for link in page.rel_links():
-                pending_queue.put(link)
-
-    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
-    _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
-    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
-
-    def _sort_links(self, links):
-        "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates"
-        eggs, no_eggs = [], []
-        seen = set()
-        for link in links:
-            if link not in seen:
-                seen.add(link)
-                if link.egg_fragment:
-                    eggs.append(link)
-                else:
-                    no_eggs.append(link)
-        return no_eggs + eggs
-
-    def _package_versions(self, links, search_name):
-        for link in self._sort_links(links):
-            for v in self._link_package_versions(link, search_name):
-                yield v
-
-    def _link_package_versions(self, link, search_name):
-        """
-        Return an iterable of triples (pkg_resources_version_key,
-        link, python_version) that can be extracted from the given
-        link.
-
-        Meant to be overridden by subclasses, not called by clients.
-        """
-        if link.egg_fragment:
-            egg_info = link.egg_fragment
-        else:
-            egg_info, ext = link.splitext()
-            if not ext:
-                if link not in self.logged_links:
-                    logger.debug('Skipping link %s; not a file' % link)
-                    self.logged_links.add(link)
-                return []
-            if egg_info.endswith('.tar'):
-                # Special double-extension case:
-                egg_info = egg_info[:-4]
-                ext = '.tar' + ext
-            if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
-                if link not in self.logged_links:
-                    logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
-                    self.logged_links.add(link)
-                return []
-            if "macosx10" in link.path and ext == '.zip':
-                if link not in self.logged_links:
-                    logger.debug('Skipping link %s; macosx10 one' % (link))
-                    self.logged_links.add(link)
-                return []
-        version = self._egg_info_matches(egg_info, search_name, link)
-        if version is None:
-            logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
-            return []
-        match = self._py_version_re.search(version)
-        if match:
-            version = version[:match.start()]
-            py_version = match.group(1)
-            if py_version != sys.version[:3]:
-                logger.debug('Skipping %s because Python version is incorrect' % link)
-                return []
-        logger.debug('Found link %s, version: %s' % (link, version))
-        return [(pkg_resources.parse_version(version),
-               link,
-               version)]
-
-    def _egg_info_matches(self, egg_info, search_name, link):
-        match = self._egg_info_re.search(egg_info)
-        if not match:
-            logger.debug('Could not parse version from link: %s' % link)
-            return None
-        name = match.group(0).lower()
-        # To match the "safe" name that pkg_resources creates:
-        name = name.replace('_', '-')
-        if name.startswith(search_name.lower()):
-            return match.group(0)[len(search_name):].lstrip('-')
-        else:
-            return None
-
-    def _get_page(self, link, req):
-        return HTMLPage.get_page(link, req, cache=self.cache)
-
-    def _get_mirror_urls(self, mirrors=None, main_mirror_url=None):
-        """Retrieves a list of URLs from the main mirror DNS entry
-        unless a list of mirror URLs is passed.
-        """
-        if not mirrors:
-            mirrors = get_mirrors(main_mirror_url)
-            # Should this be made "less random"? E.g. netselect like?
-            random.shuffle(mirrors)
-
-        mirror_urls = set()
-        for mirror_url in mirrors:
-            # Make sure we have a valid URL
-            if not ("http://" or "https://" or "file://") in mirror_url:
-                mirror_url = "http://%s" % mirror_url
-            if not mirror_url.endswith("/simple"):
-                mirror_url = "%s/simple/" % mirror_url
-            mirror_urls.add(mirror_url)
-
-        return list(mirror_urls)
-
-
-class PageCache(object):
-    """Cache of HTML pages"""
-
-    failure_limit = 3
-
-    def __init__(self):
-        self._failures = {}
-        self._pages = {}
-        self._archives = {}
-
-    def too_many_failures(self, url):
-        return self._failures.get(url, 0) >= self.failure_limit
-
-    def get_page(self, url):
-        return self._pages.get(url)
-
-    def is_archive(self, url):
-        return self._archives.get(url, False)
-
-    def set_is_archive(self, url, value=True):
-        self._archives[url] = value
-
-    def add_page_failure(self, url, level):
-        self._failures[url] = self._failures.get(url, 0)+level
-
-    def add_page(self, urls, page):
-        for url in urls:
-            self._pages[url] = page
-
-
-class HTMLPage(object):
-    """Represents one page, along with its URL"""
-
-    ## FIXME: these regexes are horrible hacks:
-    _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
-    _download_re = re.compile(r'<th>\s*download\s+url', re.I)
-    ## These aren't so awful:
-    _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
-    _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
-    _base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
-
-    def __init__(self, content, url, headers=None):
-        self.content = content
-        self.url = url
-        self.headers = headers
-
-    def __str__(self):
-        return self.url
-
-    @classmethod
-    def get_page(cls, link, req, cache=None, skip_archives=True):
-        url = link.url
-        url = url.split('#', 1)[0]
-        if cache.too_many_failures(url):
-            return None
-
-        # Check for VCS schemes that do not support lookup as web pages.
-        from pip.vcs import VcsSupport
-        for scheme in VcsSupport.schemes:
-            if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
-                logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals())
-                return None
-
-        if cache is not None:
-            inst = cache.get_page(url)
-            if inst is not None:
-                return inst
-        try:
-            if skip_archives:
-                if cache is not None:
-                    if cache.is_archive(url):
-                        return None
-                filename = link.filename
-                for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
-                    if filename.endswith(bad_ext):
-                        content_type = cls._get_content_type(url)
-                        if content_type.lower().startswith('text/html'):
-                            break
-                        else:
-                            logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
-                            if cache is not None:
-                                cache.set_is_archive(url)
-                            return None
-            logger.debug('Getting page %s' % url)
-
-            # Tack index.html onto file:// URLs that point to directories
-            (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
-            if scheme == 'file' and os.path.isdir(url2pathname(path)):
-                # add trailing slash if not present so urljoin doesn't trim final segment
-                if not url.endswith('/'):
-                    url += '/'
-                url = urlparse.urljoin(url, 'index.html')
-                logger.debug(' file: URL is directory, getting %s' % url)
-
-            resp = urlopen(url)
-
-            real_url = geturl(resp)
-            headers = resp.info()
-            contents = resp.read()
-            encoding = headers.get('Content-Encoding', None)
-            #XXX need to handle exceptions and add testing for this
-            if encoding is not None:
-                if encoding == 'gzip':
-                    contents = gzip.GzipFile(fileobj=BytesIO(contents)).read()
-                if encoding == 'deflate':
-                    contents = zlib.decompress(contents)
-            inst = cls(u(contents), real_url, headers)
-        except (HTTPError, URLError, socket.timeout, socket.error, OSError, WindowsError):
-            e = sys.exc_info()[1]
-            desc = str(e)
-            if isinstance(e, socket.timeout):
-                log_meth = logger.info
-                level = 1
-                desc = 'timed out'
-            elif isinstance(e, URLError):
-                log_meth = logger.info
-                if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
-                    desc = 'timed out'
-                    level = 1
-                else:
-                    level = 2
-            elif isinstance(e, HTTPError) and e.code == 404:
-                ## FIXME: notify?
-                log_meth = logger.info
-                level = 2
-            else:
-                log_meth = logger.info
-                level = 1
-            log_meth('Could not fetch URL %s: %s' % (link, desc))
-            log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
-            if cache is not None:
-                cache.add_page_failure(url, level)
-            return None
-        if cache is not None:
-            cache.add_page([url, real_url], inst)
-        return inst
-
-    @staticmethod
-    def _get_content_type(url):
-        """Get the Content-Type of the given url, using a HEAD request"""
-        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
-        if not scheme in ('http', 'https', 'ftp', 'ftps'):
-            ## FIXME: some warning or something?
-            ## assertion error?
-            return ''
-        req = Urllib2HeadRequest(url, headers={'Host': netloc})
-        resp = urlopen(req)
-        try:
-            if hasattr(resp, 'code') and resp.code != 200 and scheme not in ('ftp', 'ftps'):
-                ## FIXME: doesn't handle redirects
-                return ''
-            return resp.info().get('content-type', '')
-        finally:
-            resp.close()
-
-    @property
-    def base_url(self):
-        if not hasattr(self, "_base_url"):
-            match = self._base_re.search(self.content)
-            if match:
-                self._base_url = match.group(1)
-            else:
-                self._base_url = self.url
-        return self._base_url
-
-    @property
-    def links(self):
-        """Yields all links in the page"""
-        for match in self._href_re.finditer(self.content):
-            url = match.group(1) or match.group(2) or match.group(3)
-            url = self.clean_link(urlparse.urljoin(self.base_url, url))
-            yield Link(url, self)
-
-    def rel_links(self):
-        for url in self.explicit_rel_links():
-            yield url
-        for url in self.scraped_rel_links():
-            yield url
-
-    def explicit_rel_links(self, rels=('homepage', 'download')):
-        """Yields all links with the given relations"""
-        for match in self._rel_re.finditer(self.content):
-            found_rels = match.group(1).lower().split()
-            for rel in rels:
-                if rel in found_rels:
-                    break
-            else:
-                continue
-            match = self._href_re.search(match.group(0))
-            if not match:
-                continue
-            url = match.group(1) or match.group(2) or match.group(3)
-            url = self.clean_link(urlparse.urljoin(self.base_url, url))
-            yield Link(url, self)
-
-    def scraped_rel_links(self):
-        for regex in (self._homepage_re, self._download_re):
-            match = regex.search(self.content)
-            if not match:
-                continue
-            href_match = self._href_re.search(self.content, pos=match.end())
-            if not href_match:
-                continue
-            url = href_match.group(1) or href_match.group(2) or href_match.group(3)
-            if not url:
-                continue
-            url = self.clean_link(urlparse.urljoin(self.base_url, url))
-            yield Link(url, self)
-
-    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
-    def clean_link(self, url):
-        """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
-        the link, it will be rewritten to %20 (while not over-quoting
-        % or other characters)."""
-        return self._clean_re.sub(
-            lambda match: '%%%2x' % ord(match.group(0)), url)
-
-
-class Link(object):
-
-    def __init__(self, url, comes_from=None):
-        self.url = url
-        self.comes_from = comes_from
-
-    def __str__(self):
-        if self.comes_from:
-            return '%s (from %s)' % (self.url, self.comes_from)
-        else:
-            return self.url
-
-    def __repr__(self):
-        return '<Link %s>' % self
-
-    def __eq__(self, other):
-        return self.url == other.url
-
-    def __hash__(self):
-        return hash(self.url)
-
-    @property
-    def filename(self):
-        url = self.url_fragment
-        name = posixpath.basename(url)
-        assert name, ('URL %r produced no filename' % url)
-        return name
-
-    @property
-    def scheme(self):
-        return urlparse.urlsplit(self.url)[0]
-
-    @property
-    def path(self):
-        return urlparse.urlsplit(self.url)[2]
-
-    def splitext(self):
-        return splitext(posixpath.basename(self.path.rstrip('/')))
-
-    @property
-    def url_fragment(self):
-        url = self.url
-        url = url.split('#', 1)[0]
-        url = url.split('?', 1)[0]
-        url = url.rstrip('/')
-        return url
-
-    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
-
-    @property
-    def egg_fragment(self):
-        match = self._egg_fragment_re.search(self.url)
-        if not match:
-            return None
-        return match.group(1)
-
-    _md5_re = re.compile(r'md5=([a-f0-9]+)')
-
-    @property
-    def md5_hash(self):
-        match = self._md5_re.search(self.url)
-        if match:
-            return match.group(1)
-        return None
-
-    @property
-    def show_url(self):
-        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
-
-
-def get_requirement_from_url(url):
-    """Get a requirement from the URL, if possible.  This looks for #egg
-    in the URL"""
-    link = Link(url)
-    egg_info = link.egg_fragment
-    if not egg_info:
-        egg_info = splitext(link.filename)[0]
-    return package_to_requirement(egg_info)
-
-
-def package_to_requirement(package_name):
-    """Translate a name like Foo-1.2 to Foo==1.3"""
-    match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
-    if match:
-        name = match.group(1)
-        version = match.group(2)
-    else:
-        name = package_name
-        version = ''
-    if version:
-        return '%s==%s' % (name, version)
-    else:
-        return name
-
-
-def get_mirrors(hostname=None):
-    """Return the list of mirrors from the last record found on the DNS
-    entry::
-
-    >>> from pip.index import get_mirrors
-    >>> get_mirrors()
-    ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
-    'd.pypi.python.org']
-
-    Originally written for the distutils2 project by Alexis Metaireau.
-    """
-    if hostname is None:
-        hostname = DEFAULT_MIRROR_URL
-
-    # return the last mirror registered on PyPI.
-    try:
-        hostname = socket.gethostbyname_ex(hostname)[0]
-    except socket.gaierror:
-        return []
-    end_letter = hostname.split(".", 1)
-
-    # determine the list from the last one.
-    return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
-
-
-def string_range(last):
-    """Compute the range of string between "a" and last.
-
-    This works for simple "a to z" lists, but also for "a to zz" lists.
-    """
-    for k in range(len(last)):
-        for x in product(string.ascii_lowercase, repeat=k+1):
-            result = ''.join(x)
-            yield result
-            if result == last:
-                return
-
diff --git a/vendor/pip-1.2.1/pip/locations.py b/vendor/pip-1.2.1/pip/locations.py
deleted file mode 100644
index 34c6dbbe65dfbefceb74aa557fad31357c9120d0..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/locations.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""Locations where we look for configs, install stuff, etc"""
-
-import sys
-import os
-from pip.backwardcompat import get_python_lib
-
-
-def running_under_virtualenv():
-    """
-    Return True if we're running inside a virtualenv, False otherwise.
-
-    """
-    return hasattr(sys, 'real_prefix')
-
-
-if running_under_virtualenv():
-    ## FIXME: is build/ a good name?
-    build_prefix = os.path.join(sys.prefix, 'build')
-    src_prefix = os.path.join(sys.prefix, 'src')
-else:
-    ## FIXME: this isn't a very good default
-    build_prefix = os.path.join(os.getcwd(), 'build')
-    src_prefix = os.path.join(os.getcwd(), 'src')
-
-# under Mac OS X + virtualenv sys.prefix is not properly resolved
-# it is something like /path/to/python/bin/..
-build_prefix = os.path.abspath(build_prefix)
-src_prefix = os.path.abspath(src_prefix)
-
-# FIXME doesn't account for venv linked to global site-packages
-
-site_packages = get_python_lib()
-user_dir = os.path.expanduser('~')
-if sys.platform == 'win32':
-    bin_py = os.path.join(sys.prefix, 'Scripts')
-    # buildout uses 'bin' on Windows too?
-    if not os.path.exists(bin_py):
-        bin_py = os.path.join(sys.prefix, 'bin')
-    user_dir = os.environ.get('APPDATA', user_dir) # Use %APPDATA% for roaming
-    default_storage_dir = os.path.join(user_dir, 'pip')
-    default_config_file = os.path.join(default_storage_dir, 'pip.ini')
-    default_log_file = os.path.join(default_storage_dir, 'pip.log')
-else:
-    bin_py = os.path.join(sys.prefix, 'bin')
-    default_storage_dir = os.path.join(user_dir, '.pip')
-    default_config_file = os.path.join(default_storage_dir, 'pip.conf')
-    default_log_file = os.path.join(default_storage_dir, 'pip.log')
-    # Forcing to use /usr/local/bin for standard Mac OS X framework installs
-    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
-    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
-        bin_py = '/usr/local/bin'
-        default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
diff --git a/vendor/pip-1.2.1/pip/log.py b/vendor/pip-1.2.1/pip/log.py
deleted file mode 100644
index 63541a1c1058389d2c119cb2e0b5484597c316f3..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/log.py
+++ /dev/null
@@ -1,188 +0,0 @@
-"""Logging
-"""
-
-import sys
-import logging
-
-import pip.backwardcompat
-
-
-class Logger(object):
-
-    """
-    Logging object for use in command-line script.  Allows ranges of
-    levels, to avoid some redundancy of displayed information.
-    """
-
-    VERBOSE_DEBUG = logging.DEBUG-1
-    DEBUG = logging.DEBUG
-    INFO = logging.INFO
-    NOTIFY = (logging.INFO+logging.WARN)/2
-    WARN = WARNING = logging.WARN
-    ERROR = logging.ERROR
-    FATAL = logging.FATAL
-
-    LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
-
-    def __init__(self):
-        self.consumers = []
-        self.indent = 0
-        self.explicit_levels = False
-        self.in_progress = None
-        self.in_progress_hanging = False
-
-    def debug(self, msg, *args, **kw):
-        self.log(self.DEBUG, msg, *args, **kw)
-
-    def info(self, msg, *args, **kw):
-        self.log(self.INFO, msg, *args, **kw)
-
-    def notify(self, msg, *args, **kw):
-        self.log(self.NOTIFY, msg, *args, **kw)
-
-    def warn(self, msg, *args, **kw):
-        self.log(self.WARN, msg, *args, **kw)
-
-    def error(self, msg, *args, **kw):
-        self.log(self.ERROR, msg, *args, **kw)
-
-    def fatal(self, msg, *args, **kw):
-        self.log(self.FATAL, msg, *args, **kw)
-
-    def log(self, level, msg, *args, **kw):
-        if args:
-            if kw:
-                raise TypeError(
-                    "You may give positional or keyword arguments, not both")
-        args = args or kw
-        rendered = None
-        for consumer_level, consumer in self.consumers:
-            if self.level_matches(level, consumer_level):
-                if (self.in_progress_hanging
-                    and consumer in (sys.stdout, sys.stderr)):
-                    self.in_progress_hanging = False
-                    sys.stdout.write('\n')
-                    sys.stdout.flush()
-                if rendered is None:
-                    if args:
-                        rendered = msg % args
-                    else:
-                        rendered = msg
-                    rendered = ' '*self.indent + rendered
-                    if self.explicit_levels:
-                        ## FIXME: should this be a name, not a level number?
-                        rendered = '%02i %s' % (level, rendered)
-                if hasattr(consumer, 'write'):
-                    rendered += '\n'
-                    pip.backwardcompat.fwrite(consumer, rendered)
-                else:
-                    consumer(rendered)
-
-    def _show_progress(self):
-        """Should we display download progress?"""
-        return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
-
-    def start_progress(self, msg):
-        assert not self.in_progress, (
-            "Tried to start_progress(%r) while in_progress %r"
-            % (msg, self.in_progress))
-        if self._show_progress():
-            sys.stdout.write(' '*self.indent + msg)
-            sys.stdout.flush()
-            self.in_progress_hanging = True
-        else:
-            self.in_progress_hanging = False
-        self.in_progress = msg
-        self.last_message = None
-
-    def end_progress(self, msg='done.'):
-        assert self.in_progress, (
-            "Tried to end_progress without start_progress")
-        if self._show_progress():
-            if not self.in_progress_hanging:
-                # Some message has been printed out since start_progress
-                sys.stdout.write('...' + self.in_progress + msg + '\n')
-                sys.stdout.flush()
-            else:
-                # These erase any messages shown with show_progress (besides .'s)
-                logger.show_progress('')
-                logger.show_progress('')
-                sys.stdout.write(msg + '\n')
-                sys.stdout.flush()
-        self.in_progress = None
-        self.in_progress_hanging = False
-
-    def show_progress(self, message=None):
-        """If we are in a progress scope, and no log messages have been
-        shown, write out another '.'"""
-        if self.in_progress_hanging:
-            if message is None:
-                sys.stdout.write('.')
-                sys.stdout.flush()
-            else:
-                if self.last_message:
-                    padding = ' ' * max(0, len(self.last_message)-len(message))
-                else:
-                    padding = ''
-                sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
-                sys.stdout.flush()
-                self.last_message = message
-
-    def stdout_level_matches(self, level):
-        """Returns true if a message at this level will go to stdout"""
-        return self.level_matches(level, self._stdout_level())
-
-    def _stdout_level(self):
-        """Returns the level that stdout runs at"""
-        for level, consumer in self.consumers:
-            if consumer is sys.stdout:
-                return level
-        return self.FATAL
-
-    def level_matches(self, level, consumer_level):
-        """
-        >>> l = Logger()
-        >>> l.level_matches(3, 4)
-        False
-        >>> l.level_matches(3, 2)
-        True
-        >>> l.level_matches(slice(None, 3), 3)
-        False
-        >>> l.level_matches(slice(None, 3), 2)
-        True
-        >>> l.level_matches(slice(1, 3), 1)
-        True
-        >>> l.level_matches(slice(2, 3), 1)
-        False
-        """
-        if isinstance(level, slice):
-            start, stop = level.start, level.stop
-            if start is not None and start > consumer_level:
-                return False
-            if stop is not None and stop <= consumer_level:
-                return False
-            return True
-        else:
-            return level >= consumer_level
-
-    @classmethod
-    def level_for_integer(cls, level):
-        levels = cls.LEVELS
-        if level < 0:
-            return levels[0]
-        if level >= len(levels):
-            return levels[-1]
-        return levels[level]
-
-    def move_stdout_to_stderr(self):
-        to_remove = []
-        to_add = []
-        for consumer_level, consumer in self.consumers:
-            if consumer == sys.stdout:
-                to_remove.append((consumer_level, consumer))
-                to_add.append((consumer_level, sys.stderr))
-        for item in to_remove:
-            self.consumers.remove(item)
-        self.consumers.extend(to_add)
-
-logger = Logger()
diff --git a/vendor/pip-1.2.1/pip/req.py b/vendor/pip-1.2.1/pip/req.py
deleted file mode 100644
index a1a69599ccbffee5b487ccb5e339b71c657065ba..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/req.py
+++ /dev/null
@@ -1,1517 +0,0 @@
-import sys
-import os
-import shutil
-import re
-import zipfile
-import pkg_resources
-import tempfile
-from pip.locations import bin_py, running_under_virtualenv
-from pip.exceptions import (InstallationError, UninstallationError,
-                            BestVersionAlreadyInstalled)
-from pip.vcs import vcs
-from pip.log import logger
-from pip.util import display_path, rmtree
-from pip.util import ask, ask_path_exists, backup_dir
-from pip.util import is_installable_dir, is_local, dist_is_local
-from pip.util import renames, normalize_path, egg_link_path
-from pip.util import make_path_relative
-from pip import call_subprocess
-from pip.backwardcompat import (any, copytree, urlparse, urllib,
-                                ConfigParser, string_types, HTTPError,
-                                FeedParser, get_python_version,
-                                b)
-from pip.index import Link
-from pip.locations import build_prefix
-from pip.download import (get_file_content, is_url, url_to_path,
-                          path_to_url, is_archive_file,
-                          unpack_vcs_link, is_vcs_url, is_file_url,
-                          unpack_file_url, unpack_http_url)
-
-
-PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
-
-
-class InstallRequirement(object):
-
-    def __init__(self, req, comes_from, source_dir=None, editable=False,
-                 url=None, update=True):
-        self.extras = ()
-        if isinstance(req, string_types):
-            req = pkg_resources.Requirement.parse(req)
-            self.extras = req.extras
-        self.req = req
-        self.comes_from = comes_from
-        self.source_dir = source_dir
-        self.editable = editable
-        self.url = url
-        self._egg_info_path = None
-        # This holds the pkg_resources.Distribution object if this requirement
-        # is already available:
-        self.satisfied_by = None
-        # This holds the pkg_resources.Distribution object if this requirement
-        # conflicts with another installed distribution:
-        self.conflicts_with = None
-        self._temp_build_dir = None
-        self._is_bundle = None
-        # True if the editable should be updated:
-        self.update = update
-        # Set to True after successful installation
-        self.install_succeeded = None
-        # UninstallPathSet of uninstalled distribution (for possible rollback)
-        self.uninstalled = None
-
-    @classmethod
-    def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
-        name, url = parse_editable(editable_req, default_vcs)
-        if url.startswith('file:'):
-            source_dir = url_to_path(url)
-        else:
-            source_dir = None
-        return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
-
-    @classmethod
-    def from_line(cls, name, comes_from=None):
-        """Creates an InstallRequirement from a name, which might be a
-        requirement, directory containing 'setup.py', filename, or URL.
-        """
-        url = None
-        name = name.strip()
-        req = None
-        path = os.path.normpath(os.path.abspath(name))
-        link = None
-
-        if is_url(name):
-            link = Link(name)
-        elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')):
-            if not is_installable_dir(path):
-                raise InstallationError("Directory %r is not installable. File 'setup.py' not found.", name)
-            link = Link(path_to_url(name))
-        elif is_archive_file(path):
-            if not os.path.isfile(path):
-                logger.warn('Requirement %r looks like a filename, but the file does not exist', name)
-            link = Link(path_to_url(name))
-
-        # If the line has an egg= definition, but isn't editable, pull the requirement out.
-        # Otherwise, assume the name is the req for the non URL/path/archive case.
-        if link and req is None:
-            url = link.url_fragment
-            req = link.egg_fragment
-
-            # Handle relative file URLs
-            if link.scheme == 'file' and re.search(r'\.\./', url):
-                url = path_to_url(os.path.normpath(os.path.abspath(link.path)))
-
-        else:
-            req = name
-
-        return cls(req, comes_from, url=url)
-
-    def __str__(self):
-        if self.req:
-            s = str(self.req)
-            if self.url:
-                s += ' from %s' % self.url
-        else:
-            s = self.url
-        if self.satisfied_by is not None:
-            s += ' in %s' % display_path(self.satisfied_by.location)
-        if self.comes_from:
-            if isinstance(self.comes_from, string_types):
-                comes_from = self.comes_from
-            else:
-                comes_from = self.comes_from.from_path()
-            if comes_from:
-                s += ' (from %s)' % comes_from
-        return s
-
-    def from_path(self):
-        if self.req is None:
-            return None
-        s = str(self.req)
-        if self.comes_from:
-            if isinstance(self.comes_from, string_types):
-                comes_from = self.comes_from
-            else:
-                comes_from = self.comes_from.from_path()
-            if comes_from:
-                s += '->' + comes_from
-        return s
-
-    def build_location(self, build_dir, unpack=True):
-        if self._temp_build_dir is not None:
-            return self._temp_build_dir
-        if self.req is None:
-            self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
-            self._ideal_build_dir = build_dir
-            return self._temp_build_dir
-        if self.editable:
-            name = self.name.lower()
-        else:
-            name = self.name
-        # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
-        if not os.path.exists(build_dir):
-            _make_build_dir(build_dir)
-        return os.path.join(build_dir, name)
-
-    def correct_build_location(self):
-        """If the build location was a temporary directory, this will move it
-        to a new more permanent location"""
-        if self.source_dir is not None:
-            return
-        assert self.req is not None
-        assert self._temp_build_dir
-        old_location = self._temp_build_dir
-        new_build_dir = self._ideal_build_dir
-        del self._ideal_build_dir
-        if self.editable:
-            name = self.name.lower()
-        else:
-            name = self.name
-        new_location = os.path.join(new_build_dir, name)
-        if not os.path.exists(new_build_dir):
-            logger.debug('Creating directory %s' % new_build_dir)
-            _make_build_dir(new_build_dir)
-        if os.path.exists(new_location):
-            raise InstallationError(
-                'A package already exists in %s; please remove it to continue'
-                % display_path(new_location))
-        logger.debug('Moving package %s from %s to new location %s'
-                     % (self, display_path(old_location), display_path(new_location)))
-        shutil.move(old_location, new_location)
-        self._temp_build_dir = new_location
-        self.source_dir = new_location
-        self._egg_info_path = None
-
-    @property
-    def name(self):
-        if self.req is None:
-            return None
-        return self.req.project_name
-
-    @property
-    def url_name(self):
-        if self.req is None:
-            return None
-        return urllib.quote(self.req.unsafe_name)
-
-    @property
-    def setup_py(self):
-        return os.path.join(self.source_dir, 'setup.py')
-
-    def run_egg_info(self, force_root_egg_info=False):
-        assert self.source_dir
-        if self.name:
-            logger.notify('Running setup.py egg_info for package %s' % self.name)
-        else:
-            logger.notify('Running setup.py egg_info for package from %s' % self.url)
-        logger.indent += 2
-        try:
-            script = self._run_setup_py
-            script = script.replace('__SETUP_PY__', repr(self.setup_py))
-            script = script.replace('__PKG_NAME__', repr(self.name))
-            # We can't put the .egg-info files at the root, because then the source code will be mistaken
-            # for an installed egg, causing problems
-            if self.editable or force_root_egg_info:
-                egg_base_option = []
-            else:
-                egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
-                if not os.path.exists(egg_info_dir):
-                    os.makedirs(egg_info_dir)
-                egg_base_option = ['--egg-base', 'pip-egg-info']
-            call_subprocess(
-                [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
-                cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
-                command_level=logger.VERBOSE_DEBUG,
-                command_desc='python setup.py egg_info')
-        finally:
-            logger.indent -= 2
-        if not self.req:
-            self.req = pkg_resources.Requirement.parse(
-                "%(Name)s==%(Version)s" % self.pkg_info())
-            self.correct_build_location()
-
-    ## FIXME: This is a lame hack, entirely for PasteScript which has
-    ## a self-provided entry point that causes this awkwardness
-    _run_setup_py = """
-__file__ = __SETUP_PY__
-from setuptools.command import egg_info
-def replacement_run(self):
-    self.mkpath(self.egg_info)
-    installer = self.distribution.fetch_build_egg
-    for ep in egg_info.iter_entry_points('egg_info.writers'):
-        # require=False is the change we're making:
-        writer = ep.load(require=False)
-        if writer:
-            writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
-    self.find_sources()
-egg_info.egg_info.run = replacement_run
-exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
-"""
-
-    def egg_info_data(self, filename):
-        if self.satisfied_by is not None:
-            if not self.satisfied_by.has_metadata(filename):
-                return None
-            return self.satisfied_by.get_metadata(filename)
-        assert self.source_dir
-        filename = self.egg_info_path(filename)
-        if not os.path.exists(filename):
-            return None
-        fp = open(filename, 'r')
-        data = fp.read()
-        fp.close()
-        return data
-
-    def egg_info_path(self, filename):
-        if self._egg_info_path is None:
-            if self.editable:
-                base = self.source_dir
-            else:
-                base = os.path.join(self.source_dir, 'pip-egg-info')
-            filenames = os.listdir(base)
-            if self.editable:
-                filenames = []
-                for root, dirs, files in os.walk(base):
-                    for dir in vcs.dirnames:
-                        if dir in dirs:
-                            dirs.remove(dir)
-                    for dir in dirs:
-                        # Don't search in anything that looks like a virtualenv environment
-                        if (os.path.exists(os.path.join(root, dir, 'bin', 'python'))
-                            or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):
-                            dirs.remove(dir)
-                        # Also don't search through tests
-                        if dir == 'test' or dir == 'tests':
-                            dirs.remove(dir)
-                    filenames.extend([os.path.join(root, dir)
-                                     for dir in dirs])
-                filenames = [f for f in filenames if f.endswith('.egg-info')]
-
-            if not filenames:
-                raise InstallationError('No files/directories in %s (from %s)' % (base, filename))
-            assert filenames, "No files/directories in %s (from %s)" % (base, filename)
-
-            # if we have more than one match, we pick the toplevel one.  This can
-            # easily be the case if there is a dist folder which contains an
-            # extracted tarball for testing purposes.
-            if len(filenames) > 1:
-                filenames.sort(key=lambda x: x.count(os.path.sep) +
-                                             (os.path.altsep and
-                                              x.count(os.path.altsep) or 0))
-            self._egg_info_path = os.path.join(base, filenames[0])
-        return os.path.join(self._egg_info_path, filename)
-
-    def egg_info_lines(self, filename):
-        data = self.egg_info_data(filename)
-        if not data:
-            return []
-        result = []
-        for line in data.splitlines():
-            line = line.strip()
-            if not line or line.startswith('#'):
-                continue
-            result.append(line)
-        return result
-
-    def pkg_info(self):
-        p = FeedParser()
-        data = self.egg_info_data('PKG-INFO')
-        if not data:
-            logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
-        p.feed(data or '')
-        return p.close()
-
-    @property
-    def dependency_links(self):
-        return self.egg_info_lines('dependency_links.txt')
-
-    _requirements_section_re = re.compile(r'\[(.*?)\]')
-
-    def requirements(self, extras=()):
-        in_extra = None
-        for line in self.egg_info_lines('requires.txt'):
-            match = self._requirements_section_re.match(line.lower())
-            if match:
-                in_extra = match.group(1)
-                continue
-            if in_extra and in_extra not in extras:
-                logger.debug('skipping extra %s' % in_extra)
-                # Skip requirement for an extra we aren't requiring
-                continue
-            yield line
-
-    @property
-    def absolute_versions(self):
-        for qualifier, version in self.req.specs:
-            if qualifier == '==':
-                yield version
-
-    @property
-    def installed_version(self):
-        return self.pkg_info()['version']
-
-    def assert_source_matches_version(self):
-        assert self.source_dir
-        version = self.installed_version
-        if version not in self.req:
-            logger.warn('Requested %s, but installing version %s' % (self, self.installed_version))
-        else:
-            logger.debug('Source in %s has version %s, which satisfies requirement %s'
-                         % (display_path(self.source_dir), version, self))
-
-    def update_editable(self, obtain=True):
-        if not self.url:
-            logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
-            return
-        assert self.editable
-        assert self.source_dir
-        if self.url.startswith('file:'):
-            # Static paths don't get updated
-            return
-        assert '+' in self.url, "bad url: %r" % self.url
-        if not self.update:
-            return
-        vc_type, url = self.url.split('+', 1)
-        backend = vcs.get_backend(vc_type)
-        if backend:
-            vcs_backend = backend(self.url)
-            if obtain:
-                vcs_backend.obtain(self.source_dir)
-            else:
-                vcs_backend.export(self.source_dir)
-        else:
-            assert 0, (
-                'Unexpected version control type (in %s): %s'
-                % (self.url, vc_type))
-
-    def uninstall(self, auto_confirm=False):
-        """
-        Uninstall the distribution currently satisfying this requirement.
-
-        Prompts before removing or modifying files unless
-        ``auto_confirm`` is True.
-
-        Refuses to delete or modify files outside of ``sys.prefix`` -
-        thus uninstallation within a virtual environment can only
-        modify that virtual environment, even if the virtualenv is
-        linked to global site-packages.
-
-        """
-        if not self.check_if_exists():
-            raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
-        dist = self.satisfied_by or self.conflicts_with
-
-        paths_to_remove = UninstallPathSet(dist)
-
-        pip_egg_info_path = os.path.join(dist.location,
-                                         dist.egg_name()) + '.egg-info'
-        # workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
-        debian_egg_info_path = pip_egg_info_path.replace(
-            '-py%s' % pkg_resources.PY_MAJOR, '')
-        easy_install_egg = dist.egg_name() + '.egg'
-        develop_egg_link = egg_link_path(dist)
-
-        pip_egg_info_exists = os.path.exists(pip_egg_info_path)
-        debian_egg_info_exists = os.path.exists(debian_egg_info_path)
-        if pip_egg_info_exists or debian_egg_info_exists:
-            # package installed by pip
-            if pip_egg_info_exists:
-                egg_info_path = pip_egg_info_path
-            else:
-                egg_info_path = debian_egg_info_path
-            paths_to_remove.add(egg_info_path)
-            if dist.has_metadata('installed-files.txt'):
-                for installed_file in dist.get_metadata('installed-files.txt').splitlines():
-                    path = os.path.normpath(os.path.join(egg_info_path, installed_file))
-                    paths_to_remove.add(path)
-            if dist.has_metadata('top_level.txt'):
-                if dist.has_metadata('namespace_packages.txt'):
-                    namespaces = dist.get_metadata('namespace_packages.txt')
-                else:
-                    namespaces = []
-                for top_level_pkg in [p for p
-                                      in dist.get_metadata('top_level.txt').splitlines()
-                                      if p and p not in namespaces]:
-                    path = os.path.join(dist.location, top_level_pkg)
-                    paths_to_remove.add(path)
-                    paths_to_remove.add(path + '.py')
-                    paths_to_remove.add(path + '.pyc')
-
-        elif dist.location.endswith(easy_install_egg):
-            # package installed by easy_install
-            paths_to_remove.add(dist.location)
-            easy_install_pth = os.path.join(os.path.dirname(dist.location),
-                                            'easy-install.pth')
-            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
-
-        elif os.path.isfile(develop_egg_link):
-            # develop egg
-            fh = open(develop_egg_link, 'r')
-            link_pointer = os.path.normcase(fh.readline().strip())
-            fh.close()
-            assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
-            paths_to_remove.add(develop_egg_link)
-            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
-                                            'easy-install.pth')
-            paths_to_remove.add_pth(easy_install_pth, dist.location)
-
-        # find distutils scripts= scripts
-        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
-            for script in dist.metadata_listdir('scripts'):
-                paths_to_remove.add(os.path.join(bin_py, script))
-                if sys.platform == 'win32':
-                    paths_to_remove.add(os.path.join(bin_py, script) + '.bat')
-
-        # find console_scripts
-        if dist.has_metadata('entry_points.txt'):
-            config = ConfigParser.SafeConfigParser()
-            config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
-            if config.has_section('console_scripts'):
-                for name, value in config.items('console_scripts'):
-                    paths_to_remove.add(os.path.join(bin_py, name))
-                    if sys.platform == 'win32':
-                        paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
-                        paths_to_remove.add(os.path.join(bin_py, name) + '.exe.manifest')
-                        paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')
-
-        paths_to_remove.remove(auto_confirm)
-        self.uninstalled = paths_to_remove
-
-    def rollback_uninstall(self):
-        if self.uninstalled:
-            self.uninstalled.rollback()
-        else:
-            logger.error("Can't rollback %s, nothing uninstalled."
-                         % (self.project_name,))
-
-    def commit_uninstall(self):
-        if self.uninstalled:
-            self.uninstalled.commit()
-        else:
-            logger.error("Can't commit %s, nothing uninstalled."
-                         % (self.project_name,))
-
-    def archive(self, build_dir):
-        assert self.source_dir
-        create_archive = True
-        archive_name = '%s-%s.zip' % (self.name, self.installed_version)
-        archive_path = os.path.join(build_dir, archive_name)
-        if os.path.exists(archive_path):
-            response = ask_path_exists(
-                'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
-                display_path(archive_path), ('i', 'w', 'b'))
-            if response == 'i':
-                create_archive = False
-            elif response == 'w':
-                logger.warn('Deleting %s' % display_path(archive_path))
-                os.remove(archive_path)
-            elif response == 'b':
-                dest_file = backup_dir(archive_path)
-                logger.warn('Backing up %s to %s'
-                            % (display_path(archive_path), display_path(dest_file)))
-                shutil.move(archive_path, dest_file)
-        if create_archive:
-            zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
-            dir = os.path.normcase(os.path.abspath(self.source_dir))
-            for dirpath, dirnames, filenames in os.walk(dir):
-                if 'pip-egg-info' in dirnames:
-                    dirnames.remove('pip-egg-info')
-                for dirname in dirnames:
-                    dirname = os.path.join(dirpath, dirname)
-                    name = self._clean_zip_name(dirname, dir)
-                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
-                    zipdir.external_attr = 0x1ED << 16 # 0o755
-                    zip.writestr(zipdir, '')
-                for filename in filenames:
-                    if filename == PIP_DELETE_MARKER_FILENAME:
-                        continue
-                    filename = os.path.join(dirpath, filename)
-                    name = self._clean_zip_name(filename, dir)
-                    zip.write(filename, self.name + '/' + name)
-            zip.close()
-            logger.indent -= 2
-            logger.notify('Saved %s' % display_path(archive_path))
-
-    def _clean_zip_name(self, name, prefix):
-        assert name.startswith(prefix+os.path.sep), (
-            "name %r doesn't start with prefix %r" % (name, prefix))
-        name = name[len(prefix)+1:]
-        name = name.replace(os.path.sep, '/')
-        return name
-
-    def install(self, install_options, global_options=()):
-        if self.editable:
-            self.install_editable(install_options, global_options)
-            return
-        temp_location = tempfile.mkdtemp('-record', 'pip-')
-        record_filename = os.path.join(temp_location, 'install-record.txt')
-        try:
-            install_args = [
-                sys.executable, '-c',
-                "import setuptools;__file__=%r;"\
-                "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py] +\
-                list(global_options) + [
-                'install',
-                '--single-version-externally-managed',
-                '--record', record_filename]
-
-            if running_under_virtualenv():
-                ## FIXME: I'm not sure if this is a reasonable location; probably not
-                ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
-                install_args += ['--install-headers',
-                                 os.path.join(sys.prefix, 'include', 'site',
-                                              'python' + get_python_version())]
-            logger.notify('Running setup.py install for %s' % self.name)
-            logger.indent += 2
-            try:
-                call_subprocess(install_args + install_options,
-                    cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
-            finally:
-                logger.indent -= 2
-            if not os.path.exists(record_filename):
-                logger.notify('Record file %s not found' % record_filename)
-                return
-            self.install_succeeded = True
-            f = open(record_filename)
-            for line in f:
-                line = line.strip()
-                if line.endswith('.egg-info'):
-                    egg_info_dir = line
-                    break
-            else:
-                logger.warn('Could not find .egg-info directory in install record for %s' % self)
-                ## FIXME: put the record somewhere
-                ## FIXME: should this be an error?
-                return
-            f.close()
-            new_lines = []
-            f = open(record_filename)
-            for line in f:
-                filename = line.strip()
-                if os.path.isdir(filename):
-                    filename += os.path.sep
-                new_lines.append(make_path_relative(filename, egg_info_dir))
-            f.close()
-            f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
-            f.write('\n'.join(new_lines)+'\n')
-            f.close()
-        finally:
-            if os.path.exists(record_filename):
-                os.remove(record_filename)
-            os.rmdir(temp_location)
-
-    def remove_temporary_source(self):
-        """Remove the source files from this requirement, if they are marked
-        for deletion"""
-        if self.is_bundle or os.path.exists(self.delete_marker_filename):
-            logger.info('Removing source in %s' % self.source_dir)
-            if self.source_dir:
-                rmtree(self.source_dir)
-            self.source_dir = None
-        if self._temp_build_dir and os.path.exists(self._temp_build_dir):
-            rmtree(self._temp_build_dir)
-        self._temp_build_dir = None
-
-    def install_editable(self, install_options, global_options=()):
-        logger.notify('Running setup.py develop for %s' % self.name)
-        logger.indent += 2
-        try:
-            ## FIXME: should we do --install-headers here too?
-            call_subprocess(
-                [sys.executable, '-c',
-                 "import setuptools; __file__=%r; exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py]
-                + list(global_options) + ['develop', '--no-deps'] + list(install_options),
-
-                cwd=self.source_dir, filter_stdout=self._filter_install,
-                show_stdout=False)
-        finally:
-            logger.indent -= 2
-        self.install_succeeded = True
-
-    def _filter_install(self, line):
-        level = logger.NOTIFY
-        for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
-                      r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
-                      r'^byte-compiling ',
-                      # Not sure what this warning is, but it seems harmless:
-                      r"^warning: manifest_maker: standard file '-c' not found$"]:
-            if re.search(regex, line.strip()):
-                level = logger.INFO
-                break
-        return (level, line)
-
-    def check_if_exists(self):
-        """Find an installed distribution that satisfies or conflicts
-        with this requirement, and set self.satisfied_by or
-        self.conflicts_with appropriately."""
-        if self.req is None:
-            return False
-        try:
-            self.satisfied_by = pkg_resources.get_distribution(self.req)
-        except pkg_resources.DistributionNotFound:
-            return False
-        except pkg_resources.VersionConflict:
-            self.conflicts_with = pkg_resources.get_distribution(self.req.project_name)
-        return True
-
-    @property
-    def is_bundle(self):
-        if self._is_bundle is not None:
-            return self._is_bundle
-        base = self._temp_build_dir
-        if not base:
-            ## FIXME: this doesn't seem right:
-            return False
-        self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
-                           or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
-        return self._is_bundle
-
-    def bundle_requirements(self):
-        for dest_dir in self._bundle_editable_dirs:
-            package = os.path.basename(dest_dir)
-            ## FIXME: svnism:
-            for vcs_backend in vcs.backends:
-                url = rev = None
-                vcs_bundle_file = os.path.join(
-                    dest_dir, vcs_backend.bundle_file)
-                if os.path.exists(vcs_bundle_file):
-                    vc_type = vcs_backend.name
-                    fp = open(vcs_bundle_file)
-                    content = fp.read()
-                    fp.close()
-                    url, rev = vcs_backend().parse_vcs_bundle_file(content)
-                    break
-            if url:
-                url = '%s+%s@%s' % (vc_type, url, rev)
-            else:
-                url = None
-            yield InstallRequirement(
-                package, self, editable=True, url=url,
-                update=False, source_dir=dest_dir)
-        for dest_dir in self._bundle_build_dirs:
-            package = os.path.basename(dest_dir)
-            yield InstallRequirement(
-                package, self,
-                source_dir=dest_dir)
-
-    def move_bundle_files(self, dest_build_dir, dest_src_dir):
-        base = self._temp_build_dir
-        assert base
-        src_dir = os.path.join(base, 'src')
-        build_dir = os.path.join(base, 'build')
-        bundle_build_dirs = []
-        bundle_editable_dirs = []
-        for source_dir, dest_dir, dir_collection in [
-            (src_dir, dest_src_dir, bundle_editable_dirs),
-            (build_dir, dest_build_dir, bundle_build_dirs)]:
-            if os.path.exists(source_dir):
-                for dirname in os.listdir(source_dir):
-                    dest = os.path.join(dest_dir, dirname)
-                    dir_collection.append(dest)
-                    if os.path.exists(dest):
-                        logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
-                                    % (dest, dirname, self))
-                        continue
-                    if not os.path.exists(dest_dir):
-                        logger.info('Creating directory %s' % dest_dir)
-                        os.makedirs(dest_dir)
-                    shutil.move(os.path.join(source_dir, dirname), dest)
-                if not os.listdir(source_dir):
-                    os.rmdir(source_dir)
-        self._temp_build_dir = None
-        self._bundle_build_dirs = bundle_build_dirs
-        self._bundle_editable_dirs = bundle_editable_dirs
-
-    @property
-    def delete_marker_filename(self):
-        assert self.source_dir
-        return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
-
-
-DELETE_MARKER_MESSAGE = '''\
-This file is placed here by pip to indicate the source was put
-here by pip.
-
-Once this package is successfully installed this source code will be
-deleted (unless you remove this file).
-'''
-
-
-class Requirements(object):
-
-    def __init__(self):
-        self._keys = []
-        self._dict = {}
-
-    def keys(self):
-        return self._keys
-
-    def values(self):
-        values_list = []
-        for key in self._keys:
-            values_list.append(self._dict[key])
-        return values_list
-
-    def __contains__(self, item):
-        return item in self._keys
-
-    def __setitem__(self, key, value):
-        if key not in self._keys:
-            self._keys.append(key)
-        self._dict[key] = value
-
-    def __getitem__(self, key):
-        return self._dict[key]
-
-    def __repr__(self):
-        values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
-        return 'Requirements({%s})' % ', '.join(values)
-
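The Requirements class above is a small insertion-order-preserving mapping (the Python
versions this vendored pip targets do not guarantee dict ordering), and RequirementSet
below relies on that ordering when listing and installing packages. A minimal sketch,
assuming the class is importable as pip.req.Requirements (the usual location in
pip 1.2.1); the string values stand in for the InstallRequirement objects pip would store:

    from pip.req import Requirements  # assumption: pip 1.2.1 module layout

    reqs = Requirements()
    reqs['Flask'] = 'flask-requirement'    # placeholder; normally an InstallRequirement
    reqs['Jinja2'] = 'jinja2-requirement'
    print(reqs.keys())        # ['Flask', 'Jinja2'] -- insertion order is kept
    print('Flask' in reqs)    # True
    print(reqs['Flask'])      # flask-requirement
    print(reqs)               # Requirements({'Flask': 'flask-requirement', 'Jinja2': 'jinja2-requirement'})
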
-
-class RequirementSet(object):
-
-    def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
-                 upgrade=False, ignore_installed=False,
-                 ignore_dependencies=False, force_reinstall=False):
-        self.build_dir = build_dir
-        self.src_dir = src_dir
-        self.download_dir = download_dir
-        self.download_cache = download_cache
-        self.upgrade = upgrade
-        self.ignore_installed = ignore_installed
-        self.force_reinstall = force_reinstall
-        self.requirements = Requirements()
-        # Mapping of alias: real_name
-        self.requirement_aliases = {}
-        self.unnamed_requirements = []
-        self.ignore_dependencies = ignore_dependencies
-        self.successfully_downloaded = []
-        self.successfully_installed = []
-        self.reqs_to_cleanup = []
-
-    def __str__(self):
-        reqs = [req for req in self.requirements.values()
-                if not req.comes_from]
-        reqs.sort(key=lambda req: req.name.lower())
-        return ' '.join([str(req.req) for req in reqs])
-
-    def add_requirement(self, install_req):
-        name = install_req.name
-        if not name:
-            self.unnamed_requirements.append(install_req)
-        else:
-            if self.has_requirement(name):
-                raise InstallationError(
-                    'Double requirement given: %s (already in %s, name=%r)'
-                    % (install_req, self.get_requirement(name), name))
-            self.requirements[name] = install_req
-            ## FIXME: what about other normalizations?  E.g., _ vs. -?
-            if name.lower() != name:
-                self.requirement_aliases[name.lower()] = name
-
-    def has_requirement(self, project_name):
-        for name in project_name, project_name.lower():
-            if name in self.requirements or name in self.requirement_aliases:
-                return True
-        return False
-
-    @property
-    def has_requirements(self):
-        return list(self.requirements.values()) or self.unnamed_requirements
-
-    @property
-    def has_editables(self):
-        if any(req.editable for req in self.requirements.values()):
-            return True
-        if any(req.editable for req in self.unnamed_requirements):
-            return True
-        return False
-
-    @property
-    def is_download(self):
-        if self.download_dir:
-            self.download_dir = os.path.expanduser(self.download_dir)
-            if os.path.exists(self.download_dir):
-                return True
-            else:
-                logger.fatal('Could not find download directory')
-                raise InstallationError(
-                    "Could not find or access download directory '%s'"
-                    % display_path(self.download_dir))
-        return False
-
-    def get_requirement(self, project_name):
-        for name in project_name, project_name.lower():
-            if name in self.requirements:
-                return self.requirements[name]
-            if name in self.requirement_aliases:
-                return self.requirements[self.requirement_aliases[name]]
-        raise KeyError("No project with the name %r" % project_name)
-
-    def uninstall(self, auto_confirm=False):
-        for req in self.requirements.values():
-            req.uninstall(auto_confirm=auto_confirm)
-            req.commit_uninstall()
-
-    def locate_files(self):
-        ## FIXME: duplicates code from install_files; relevant code should
-        ##        probably be factored out into a separate method
-        unnamed = list(self.unnamed_requirements)
-        reqs = list(self.requirements.values())
-        while reqs or unnamed:
-            if unnamed:
-                req_to_install = unnamed.pop(0)
-            else:
-                req_to_install = reqs.pop(0)
-            install_needed = True
-            if not self.ignore_installed and not req_to_install.editable:
-                req_to_install.check_if_exists()
-                if req_to_install.satisfied_by:
-                    if self.upgrade:
-                        req_to_install.conflicts_with = req_to_install.satisfied_by
-                        req_to_install.satisfied_by = None
-                    else:
-                        install_needed = False
-                if req_to_install.satisfied_by:
-                    logger.notify('Requirement already satisfied '
-                                  '(use --upgrade to upgrade): %s'
-                                  % req_to_install)
-
-            if req_to_install.editable:
-                if req_to_install.source_dir is None:
-                    req_to_install.source_dir = req_to_install.build_location(self.src_dir)
-            elif install_needed:
-                req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
-
-            if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
-                raise InstallationError('Could not install requirement %s '
-                                       'because source folder %s does not exist '
-                                       '(perhaps --no-download was used without first running '
-                                       'an equivalent install with --no-install?)'
-                                       % (req_to_install, req_to_install.source_dir))
-
-    def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
-        """Prepare process. Create temp directories, download and/or unpack files."""
-        unnamed = list(self.unnamed_requirements)
-        reqs = list(self.requirements.values())
-        while reqs or unnamed:
-            if unnamed:
-                req_to_install = unnamed.pop(0)
-            else:
-                req_to_install = reqs.pop(0)
-            install = True
-            best_installed = False
-            if not self.ignore_installed and not req_to_install.editable:
-                req_to_install.check_if_exists()
-                if req_to_install.satisfied_by:
-                    if self.upgrade:
-                        if not self.force_reinstall:
-                            try:
-                                url = finder.find_requirement(
-                                    req_to_install, self.upgrade)
-                            except BestVersionAlreadyInstalled:
-                                best_installed = True
-                                install = False
-                            else:
-                                # Avoid the need to call find_requirement again
-                                req_to_install.url = url.url
-
-                        if not best_installed:
-                            req_to_install.conflicts_with = req_to_install.satisfied_by
-                            req_to_install.satisfied_by = None
-                    else:
-                        install = False
-                if req_to_install.satisfied_by:
-                    if best_installed:
-                        logger.notify('Requirement already up-to-date: %s'
-                                      % req_to_install)
-                    else:
-                        logger.notify('Requirement already satisfied '
-                                      '(use --upgrade to upgrade): %s'
-                                      % req_to_install)
-            if req_to_install.editable:
-                logger.notify('Obtaining %s' % req_to_install)
-            elif install:
-                if req_to_install.url and req_to_install.url.lower().startswith('file:'):
-                    logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url)))
-                else:
-                    logger.notify('Downloading/unpacking %s' % req_to_install)
-            logger.indent += 2
-            try:
-                is_bundle = False
-                if req_to_install.editable:
-                    if req_to_install.source_dir is None:
-                        location = req_to_install.build_location(self.src_dir)
-                        req_to_install.source_dir = location
-                    else:
-                        location = req_to_install.source_dir
-                    if not os.path.exists(self.build_dir):
-                        _make_build_dir(self.build_dir)
-                    req_to_install.update_editable(not self.is_download)
-                    if self.is_download:
-                        req_to_install.run_egg_info()
-                        req_to_install.archive(self.download_dir)
-                    else:
-                        req_to_install.run_egg_info()
-                elif install:
-                    ##@@ if filesystem packages are not marked
-                    ##editable in a req, a non deterministic error
-                    ##occurs when the script attempts to unpack the
-                    ##build directory
-
-                    location = req_to_install.build_location(self.build_dir, not self.is_download)
-                    ## FIXME: is the existence of the checkout good enough to use it?  I don't think so.
-                    unpack = True
-                    url = None
-                    if not os.path.exists(os.path.join(location, 'setup.py')):
-                        ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
-                        if req_to_install.url is None:
-                            url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
-                        else:
-                            ## FIXME: should req_to_install.url already be a link?
-                            url = Link(req_to_install.url)
-                            assert url
-                        if url:
-                            try:
-                                self.unpack_url(url, location, self.is_download)
-                            except HTTPError:
-                                e = sys.exc_info()[1]
-                                logger.fatal('Could not install requirement %s because of error %s'
-                                             % (req_to_install, e))
-                                raise InstallationError(
-                                    'Could not install requirement %s because of HTTP error %s for URL %s'
-                                    % (req_to_install, e, url))
-                        else:
-                            unpack = False
-                    if unpack:
-                        is_bundle = req_to_install.is_bundle
-                        if is_bundle:
-                            req_to_install.move_bundle_files(self.build_dir, self.src_dir)
-                            for subreq in req_to_install.bundle_requirements():
-                                reqs.append(subreq)
-                                self.add_requirement(subreq)
-                        elif self.is_download:
-                            req_to_install.source_dir = location
-                            req_to_install.run_egg_info()
-                            if url and url.scheme in vcs.all_schemes:
-                                req_to_install.archive(self.download_dir)
-                        else:
-                            req_to_install.source_dir = location
-                            req_to_install.run_egg_info()
-                            if force_root_egg_info:
-                                # We need to run this to make sure that the .egg-info/
-                                # directory is created for packing in the bundle
-                                req_to_install.run_egg_info(force_root_egg_info=True)
-                            req_to_install.assert_source_matches_version()
-                            #@@ sketchy way of identifying packages not grabbed from an index
-                            if bundle and req_to_install.url:
-                                self.copy_to_build_dir(req_to_install)
-                                install = False
-                        # req_to_install.req is only avail after unpack for URL pkgs
-                        # repeat check_if_exists to uninstall-on-upgrade (#14)
-                        req_to_install.check_if_exists()
-                        if req_to_install.satisfied_by:
-                            if self.upgrade or self.ignore_installed:
-                                req_to_install.conflicts_with = req_to_install.satisfied_by
-                                req_to_install.satisfied_by = None
-                            else:
-                                install = False
-                if not is_bundle:
-                    ## FIXME: shouldn't be globally added:
-                    finder.add_dependency_links(req_to_install.dependency_links)
-                    if (req_to_install.extras):
-                        logger.notify("Installing extra requirements: %r" % ','.join(req_to_install.extras))
-                    if not self.ignore_dependencies:
-                        for req in req_to_install.requirements(req_to_install.extras):
-                            try:
-                                name = pkg_resources.Requirement.parse(req).project_name
-                            except ValueError:
-                                e = sys.exc_info()[1]
-                                ## FIXME: proper warning
-                                logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
-                                continue
-                            if self.has_requirement(name):
-                                ## FIXME: check for conflict
-                                continue
-                            subreq = InstallRequirement(req, req_to_install)
-                            reqs.append(subreq)
-                            self.add_requirement(subreq)
-                    if req_to_install.name not in self.requirements:
-                        self.requirements[req_to_install.name] = req_to_install
-                    if self.is_download:
-                        self.reqs_to_cleanup.append(req_to_install)
-                else:
-                    self.reqs_to_cleanup.append(req_to_install)
-
-                if install:
-                    self.successfully_downloaded.append(req_to_install)
-                    if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
-                        self.copy_to_build_dir(req_to_install)
-            finally:
-                logger.indent -= 2
-
-    def cleanup_files(self, bundle=False):
-        """Clean up files, remove builds."""
-        logger.notify('Cleaning up...')
-        logger.indent += 2
-        for req in self.reqs_to_cleanup:
-            req.remove_temporary_source()
-
-        remove_dir = []
-        if self._pip_has_created_build_dir():
-            remove_dir.append(self.build_dir)
-
-        # The source dir of a bundle can always be removed.
-        # FIXME: not if it pre-existed the bundle!
-        if bundle:
-            remove_dir.append(self.src_dir)
-
-        for dir in remove_dir:
-            if os.path.exists(dir):
-                logger.info('Removing temporary dir %s...' % dir)
-                rmtree(dir)
-
-        logger.indent -= 2
-
-    def _pip_has_created_build_dir(self):
-        return (self.build_dir == build_prefix and
-                os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)))
-
-    def copy_to_build_dir(self, req_to_install):
-        target_dir = req_to_install.editable and self.src_dir or self.build_dir
-        logger.info("Copying %s to %s" % (req_to_install.name, target_dir))
-        dest = os.path.join(target_dir, req_to_install.name)
-        copytree(req_to_install.source_dir, dest)
-        call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
-                        command_desc='python setup.py clean')
-
-    def unpack_url(self, link, location, only_download=False):
-        if only_download:
-            loc = self.download_dir
-        else:
-            loc = location
-        if is_vcs_url(link):
-            return unpack_vcs_link(link, loc, only_download)
-        elif is_file_url(link):
-            return unpack_file_url(link, loc)
-        else:
-            if self.download_cache:
-                self.download_cache = os.path.expanduser(self.download_cache)
-            retval = unpack_http_url(link, location, self.download_cache, self.download_dir)
-            if only_download:
-                _write_delete_marker_message(os.path.join(location, PIP_DELETE_MARKER_FILENAME))
-            return retval
-
-    def install(self, install_options, global_options=()):
-        """Install everything in this set (after having downloaded and unpacked the packages)"""
-        to_install = [r for r in self.requirements.values()
-                      if not r.satisfied_by]
-
-        if to_install:
-            logger.notify('Installing collected packages: %s' % ', '.join([req.name for req in to_install]))
-        logger.indent += 2
-        try:
-            for requirement in to_install:
-                if requirement.conflicts_with:
-                    logger.notify('Found existing installation: %s'
-                                  % requirement.conflicts_with)
-                    logger.indent += 2
-                    try:
-                        requirement.uninstall(auto_confirm=True)
-                    finally:
-                        logger.indent -= 2
-                try:
-                    requirement.install(install_options, global_options)
-                except:
-                    # if install did not succeed, rollback previous uninstall
-                    if requirement.conflicts_with and not requirement.install_succeeded:
-                        requirement.rollback_uninstall()
-                    raise
-                else:
-                    if requirement.conflicts_with and requirement.install_succeeded:
-                        requirement.commit_uninstall()
-                requirement.remove_temporary_source()
-        finally:
-            logger.indent -= 2
-        self.successfully_installed = to_install
-
-    def create_bundle(self, bundle_filename):
-        ## FIXME: can't decide which is better; zip is easier to read
-        ## random files from, but tar.bz2 is smaller and not as lame a
-        ## format.
-
-        ## FIXME: this file should really include a manifest of the
-        ## packages, maybe some other metadata files.  It would make
-        ## it easier to detect as well.
-        zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
-        vcs_dirs = []
-        for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
-            dir = os.path.normcase(os.path.abspath(dir))
-            for dirpath, dirnames, filenames in os.walk(dir):
-                for backend in vcs.backends:
-                    vcs_backend = backend()
-                    vcs_url = vcs_rev = None
-                    if vcs_backend.dirname in dirnames:
-                        for vcs_dir in vcs_dirs:
-                            if dirpath.startswith(vcs_dir):
-                                # vcs bundle file already in parent directory
-                                break
-                        else:
-                            vcs_url, vcs_rev = vcs_backend.get_info(
-                                os.path.join(dir, dirpath))
-                            vcs_dirs.append(dirpath)
-                        vcs_bundle_file = vcs_backend.bundle_file
-                        vcs_guide = vcs_backend.guide % {'url': vcs_url,
-                                                         'rev': vcs_rev}
-                        dirnames.remove(vcs_backend.dirname)
-                        break
-                if 'pip-egg-info' in dirnames:
-                    dirnames.remove('pip-egg-info')
-                for dirname in dirnames:
-                    dirname = os.path.join(dirpath, dirname)
-                    name = self._clean_zip_name(dirname, dir)
-                    zip.writestr(basename + '/' + name + '/', '')
-                for filename in filenames:
-                    if filename == PIP_DELETE_MARKER_FILENAME:
-                        continue
-                    filename = os.path.join(dirpath, filename)
-                    name = self._clean_zip_name(filename, dir)
-                    zip.write(filename, basename + '/' + name)
-                if vcs_url:
-                    name = os.path.join(dirpath, vcs_bundle_file)
-                    name = self._clean_zip_name(name, dir)
-                    zip.writestr(basename + '/' + name, vcs_guide)
-
-        zip.writestr('pip-manifest.txt', self.bundle_requirements())
-        zip.close()
-
-    BUNDLE_HEADER = '''\
-# This is a pip bundle file, that contains many source packages
-# that can be installed as a group.  You can install this like:
-#     pip this_file.zip
-# The rest of the file contains a list of all the packages included:
-'''
-
-    def bundle_requirements(self):
-        parts = [self.BUNDLE_HEADER]
-        for req in [req for req in self.requirements.values()
-                    if not req.comes_from]:
-            parts.append('%s==%s\n' % (req.name, req.installed_version))
-        parts.append('# These packages were installed to satisfy the above requirements:\n')
-        for req in [req for req in self.requirements.values()
-                    if req.comes_from]:
-            parts.append('%s==%s\n' % (req.name, req.installed_version))
-        ## FIXME: should we do something with self.unnamed_requirements?
-        return ''.join(parts)
-
-    def _clean_zip_name(self, name, prefix):
-        assert name.startswith(prefix+os.path.sep), (
-            "name %r doesn't start with prefix %r" % (name, prefix))
-        name = name[len(prefix)+1:]
-        name = name.replace(os.path.sep, '/')
-        return name
-
-
-def _make_build_dir(build_dir):
-    os.makedirs(build_dir)
-    _write_delete_marker_message(os.path.join(build_dir, PIP_DELETE_MARKER_FILENAME))
-
-
-def _write_delete_marker_message(filepath):
-    marker_fp = open(filepath, 'w')
-    marker_fp.write(DELETE_MARKER_MESSAGE)
-    marker_fp.close()
-
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-
-
-def parse_requirements(filename, finder=None, comes_from=None, options=None):
-    skip_match = None
-    skip_regex = options.skip_requirements_regex
-    if skip_regex:
-        skip_match = re.compile(skip_regex)
-    filename, content = get_file_content(filename, comes_from=comes_from)
-    for line_number, line in enumerate(content.splitlines()):
-        line_number += 1
-        line = line.strip()
-        if not line or line.startswith('#'):
-            continue
-        if skip_match and skip_match.search(line):
-            continue
-        if line.startswith('-r') or line.startswith('--requirement'):
-            if line.startswith('-r'):
-                req_url = line[2:].strip()
-            else:
-                req_url = line[len('--requirement'):].strip().strip('=')
-            if _scheme_re.search(filename):
-                # Relative to a URL
-                req_url = urlparse.urljoin(filename, req_url)
-            elif not _scheme_re.search(req_url):
-                req_url = os.path.join(os.path.dirname(filename), req_url)
-            for item in parse_requirements(req_url, finder, comes_from=filename, options=options):
-                yield item
-        elif line.startswith('-Z') or line.startswith('--always-unzip'):
-            # No longer used, but previously these were used in
-            # requirement files, so we'll ignore.
-            pass
-        elif line.startswith('-f') or line.startswith('--find-links'):
-            if line.startswith('-f'):
-                line = line[2:].strip()
-            else:
-                line = line[len('--find-links'):].strip().lstrip('=')
-            ## FIXME: it would be nice to keep track of the source of
-            ## the find_links:
-            if finder:
-                finder.find_links.append(line)
-        elif line.startswith('-i') or line.startswith('--index-url'):
-            if line.startswith('-i'):
-                line = line[2:].strip()
-            else:
-                line = line[len('--index-url'):].strip().lstrip('=')
-            if finder:
-                finder.index_urls = [line]
-        elif line.startswith('--extra-index-url'):
-            line = line[len('--extra-index-url'):].strip().lstrip('=')
-            if finder:
-                finder.index_urls.append(line)
-        else:
-            comes_from = '-r %s (line %s)' % (filename, line_number)
-            if line.startswith('-e') or line.startswith('--editable'):
-                if line.startswith('-e'):
-                    line = line[2:].strip()
-                else:
-                    line = line[len('--editable'):].strip().lstrip('=')
-                req = InstallRequirement.from_editable(
-                    line, comes_from=comes_from, default_vcs=options.default_vcs)
-            else:
-                req = InstallRequirement.from_line(line, comes_from)
-            yield req
-
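parse_requirements() above yields one InstallRequirement per requirement line, recursing
into nested -r files and passing -f/-i/--extra-index-url lines to the finder. A rough
usage sketch; the file contents and the FakeOptions stand-in are hypothetical, and the
import path assumes pip 1.2.1's layout:

    from pip.req import parse_requirements  # assumption: pip 1.2.1 module layout

    class FakeOptions(object):
        # parse_requirements reads exactly these two attributes from `options`
        skip_requirements_regex = None
        default_vcs = None

    # requirements.txt (hypothetical contents):
    #     Flask==0.9
    #     -e git+https://example.com/repo.git#egg=mypkg
    for req in parse_requirements('requirements.txt', options=FakeOptions()):
        print('%s editable=%s' % (req.name, req.editable))
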
-
-def parse_editable(editable_req, default_vcs=None):
-    """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
-    (Foobar) and a URL"""
-    url = editable_req
-    if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
-        # Treating it as code that has already been checked out
-        url = path_to_url(url)
-    if url.lower().startswith('file:'):
-        return None, url
-    for version_control in vcs:
-        if url.lower().startswith('%s:' % version_control):
-            url = '%s+%s' % (version_control, url)
-    if '+' not in url:
-        if default_vcs:
-            url = default_vcs + '+' + url
-        else:
-            raise InstallationError(
-                '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
-    vc_type = url.split('+', 1)[0].lower()
-    if not vcs.get_backend(vc_type):
-        raise InstallationError(
-            'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) are currently supported' % editable_req)
-    match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
-    if (not match or not match.group(1)) and vcs.get_backend(vc_type):
-        parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
-        if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
-            req = parts[-3]
-        elif parts[-1] == 'trunk':
-            req = parts[-2]
-        else:
-            raise InstallationError(
-                '--editable=%s is not the right format; it must have #egg=Package'
-                % editable_req)
-    else:
-        req = match.group(1)
-    ## FIXME: use package_to_requirement?
-    match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
-    if match:
-        # Strip off -dev, -0.2, etc.
-        req = match.group(1)
-    return req, url
-
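parse_editable() above turns an --editable argument into a (requirement name, VCS URL)
pair. Two hedged examples, assuming the pip 1.2.1 import path; the URLs are made up:

    from pip.req import parse_editable  # assumption: pip 1.2.1 module layout

    # A VCS URL with an #egg fragment: the fragment becomes the requirement name.
    print(parse_editable('git+https://example.com/repo.git#egg=Foobar'))
    # -> ('Foobar', 'git+https://example.com/repo.git#egg=Foobar')

    # A URL without a vcs+ prefix needs default_vcs, otherwise the '+' check
    # above raises InstallationError.
    print(parse_editable('https://example.com/repo#egg=Foobar', default_vcs='git'))
    # -> ('Foobar', 'git+https://example.com/repo#egg=Foobar')
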
-
-class UninstallPathSet(object):
-    """A set of file paths to be removed in the uninstallation of a
-    requirement."""
-    def __init__(self, dist):
-        self.paths = set()
-        self._refuse = set()
-        self.pth = {}
-        self.dist = dist
-        self.save_dir = None
-        self._moved_paths = []
-
-    def _permitted(self, path):
-        """
-        Return True if the given path is one we are permitted to
-        remove/modify, False otherwise.
-
-        """
-        return is_local(path)
-
-    def _can_uninstall(self):
-        if not dist_is_local(self.dist):
-            logger.notify("Not uninstalling %s at %s, outside environment %s"
-                          % (self.dist.project_name, normalize_path(self.dist.location), sys.prefix))
-            return False
-        return True
-
-    def add(self, path):
-        path = normalize_path(path)
-        if not os.path.exists(path):
-            return
-        if self._permitted(path):
-            self.paths.add(path)
-        else:
-            self._refuse.add(path)
-
-    def add_pth(self, pth_file, entry):
-        pth_file = normalize_path(pth_file)
-        if self._permitted(pth_file):
-            if pth_file not in self.pth:
-                self.pth[pth_file] = UninstallPthEntries(pth_file)
-            self.pth[pth_file].add(entry)
-        else:
-            self._refuse.add(pth_file)
-
-    def compact(self, paths):
-        """Compact a path set to contain the minimal number of paths
-        necessary to contain all paths in the set. If /a/path/ and
-        /a/path/to/a/file.txt are both in the set, leave only the
-        shorter path."""
-        short_paths = set()
-        for path in sorted(paths, key=len):
-            if not any([(path.startswith(shortpath) and
-                         path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
-                        for shortpath in short_paths]):
-                short_paths.add(path)
-        return short_paths
-
-    def _stash(self, path):
-        return os.path.join(
-            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
-
-    def remove(self, auto_confirm=False):
-        """Remove paths in ``self.paths`` with confirmation (unless
-        ``auto_confirm`` is True)."""
-        if not self._can_uninstall():
-            return
-        logger.notify('Uninstalling %s:' % self.dist.project_name)
-        logger.indent += 2
-        paths = sorted(self.compact(self.paths))
-        try:
-            if auto_confirm:
-                response = 'y'
-            else:
-                for path in paths:
-                    logger.notify(path)
-                response = ask('Proceed (y/n)? ', ('y', 'n'))
-            if self._refuse:
-                logger.notify('Not removing or modifying (outside of prefix):')
-                for path in self.compact(self._refuse):
-                    logger.notify(path)
-            if response == 'y':
-                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
-                                                 prefix='pip-')
-                for path in paths:
-                    new_path = self._stash(path)
-                    logger.info('Removing file or directory %s' % path)
-                    self._moved_paths.append(path)
-                    renames(path, new_path)
-                for pth in self.pth.values():
-                    pth.remove()
-                logger.notify('Successfully uninstalled %s' % self.dist.project_name)
-
-        finally:
-            logger.indent -= 2
-
-    def rollback(self):
-        """Rollback the changes previously made by remove()."""
-        if self.save_dir is None:
-            logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
-            return False
-        logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
-        for path in self._moved_paths:
-            tmp_path = self._stash(path)
-            logger.info('Replacing %s' % path)
-            renames(tmp_path, path)
-        for pth in self.pth.values():
-            pth.rollback()
-
-    def commit(self):
-        """Remove temporary save dir: rollback will no longer be possible."""
-        if self.save_dir is not None:
-            rmtree(self.save_dir)
-            self.save_dir = None
-            self._moved_paths = []
-
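UninstallPathSet.compact() above keeps only the shortest covering paths, so removing a
directory also accounts for everything recorded underneath it. A stand-alone sketch of
the same logic, purely illustrative and assuming POSIX-style paths:

    import os

    def compact(paths):
        # Same algorithm as UninstallPathSet.compact above.
        short_paths = set()
        for path in sorted(paths, key=len):
            if not any(path.startswith(shortpath) and
                       path[len(shortpath.rstrip(os.path.sep))] == os.path.sep
                       for shortpath in short_paths):
                short_paths.add(path)
        return short_paths

    print(sorted(compact({'/a/path/', '/a/path/to/a/file.txt', '/a/other.txt'})))
    # ['/a/other.txt', '/a/path/']  -- the file under /a/path/ is covered by the directory
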
-
-class UninstallPthEntries(object):
-    def __init__(self, pth_file):
-        if not os.path.isfile(pth_file):
-            raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
-        self.file = pth_file
-        self.entries = set()
-        self._saved_lines = None
-
-    def add(self, entry):
-        entry = os.path.normcase(entry)
-        # On Windows, os.path.normcase converts the entry to use
-        # backslashes.  This is correct for entries that describe absolute
-        # paths outside of site-packages, but all the others use forward
-        # slashes.
-        if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]:
-            entry = entry.replace('\\', '/')
-        self.entries.add(entry)
-
-    def remove(self):
-        logger.info('Removing pth entries from %s:' % self.file)
-        fh = open(self.file, 'rb')
-        # windows uses '\r\n' with py3k, but uses '\n' with py2.x
-        lines = fh.readlines()
-        self._saved_lines = lines
-        fh.close()
-        if any(b('\r\n') in line for line in lines):
-            endline = '\r\n'
-        else:
-            endline = '\n'
-        for entry in self.entries:
-            try:
-                logger.info('Removing entry: %s' % entry)
-                lines.remove(b(entry + endline))
-            except ValueError:
-                pass
-        fh = open(self.file, 'wb')
-        fh.writelines(lines)
-        fh.close()
-
-    def rollback(self):
-        if self._saved_lines is None:
-            logger.error('Cannot roll back changes to %s, none were made' % self.file)
-            return False
-        logger.info('Rolling %s back to previous state' % self.file)
-        fh = open(self.file, 'wb')
-        fh.writelines(self._saved_lines)
-        fh.close()
-        return True
-
-
-class FakeFile(object):
-    """Wrap a list of lines in an object with readline() to make
-    ConfigParser happy."""
-    def __init__(self, lines):
-        self._gen = (l for l in lines)
-
-    def readline(self):
-        try:
-            try:
-                return next(self._gen)
-            except NameError:
-                return self._gen.next()
-        except StopIteration:
-            return ''
-
-    def __iter__(self):
-        return self._gen
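FakeFile exists so the lines returned by dist.get_metadata_lines('entry_points.txt') can
be fed to ConfigParser.readfp() in uninstall() above, which looks up console_scripts to
remove. A minimal Python 2 sketch of that pattern (simplified copy of the wrapper; the
entry-point lines are invented):

    import ConfigParser

    class FakeFile(object):
        # Simplified FakeFile: readline() drains a generator of lines.
        def __init__(self, lines):
            self._gen = (l for l in lines)
        def readline(self):
            try:
                return next(self._gen)
            except StopIteration:
                return ''

    lines = ['[console_scripts]', 'mytool = mypkg.cli:main']
    config = ConfigParser.SafeConfigParser()
    config.readfp(FakeFile(lines))
    print(config.items('console_scripts'))   # [('mytool', 'mypkg.cli:main')]
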
diff --git a/vendor/pip-1.2.1/pip/runner.py b/vendor/pip-1.2.1/pip/runner.py
deleted file mode 100644
index be830ad9a9582f4a13aefe52c2f27a3f9a638ee6..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/runner.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import sys
-import os
-
-
-def run():
-    base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-    ## FIXME: this is kind of crude; if we could create a fake pip
-    ## module, then exec into it and update pip.__path__ properly, we
-    ## wouldn't have to update sys.path:
-    sys.path.insert(0, base)
-    import pip
-    return pip.main()
-
-
-if __name__ == '__main__':
-    exit = run()
-    if exit:
-        sys.exit(exit)
diff --git a/vendor/pip-1.2.1/pip/status_codes.py b/vendor/pip-1.2.1/pip/status_codes.py
deleted file mode 100644
index b6208e96445c1b5d8759ac3a68f4d1016c0c8552..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/status_codes.py
+++ /dev/null
@@ -1,5 +0,0 @@
-SUCCESS = 0
-ERROR = 1
-UNKNOWN_ERROR = 2
-VIRTUALENV_NOT_FOUND = 3
-NO_MATCHES_FOUND = 23
diff --git a/vendor/pip-1.2.1/pip/util.py b/vendor/pip-1.2.1/pip/util.py
deleted file mode 100644
index e5ad6df17cf253d5f7be5362bf302c6b6627696e..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/util.py
+++ /dev/null
@@ -1,509 +0,0 @@
-import sys
-import shutil
-import os
-import stat
-import re
-import posixpath
-import pkg_resources
-import zipfile
-import tarfile
-from pip.exceptions import InstallationError, BadCommand
-from pip.backwardcompat import WindowsError, string_types, raw_input
-from pip.locations import site_packages, running_under_virtualenv
-from pip.log import logger
-
-__all__ = ['rmtree', 'display_path', 'backup_dir',
-           'find_command', 'ask', 'Inf',
-           'normalize_name', 'splitext',
-           'format_size', 'is_installable_dir',
-           'is_svn_page', 'file_contents',
-           'split_leading_dir', 'has_leading_dir',
-           'make_path_relative', 'normalize_path',
-           'renames', 'get_terminal_size',
-           'unzip_file', 'untar_file', 'create_download_cache_folder',
-           'cache_download', 'unpack_file']
-
-
-def rmtree(dir, ignore_errors=False):
-    shutil.rmtree(dir, ignore_errors=ignore_errors,
-                  onerror=rmtree_errorhandler)
-
-
-def rmtree_errorhandler(func, path, exc_info):
-    """On Windows, the files in .svn are read-only, so when rmtree() tries to
-    remove them, an exception is thrown.  We catch that here, remove the
-    read-only attribute, and hopefully continue without problems."""
-    exctype, value = exc_info[:2]
-    # On Python 2.4, it will be OSError number 13
-    # On all more recent Pythons, it'll be WindowsError number 5
-    if not ((exctype is WindowsError and value.args[0] == 5) or
-            (exctype is OSError and value.args[0] == 13)):
-        raise
-    # file type should currently be read only
-    if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
-        raise
-    # convert to read/write
-    os.chmod(path, stat.S_IWRITE)
-    # use the original function to repeat the operation
-    func(path)
-
-
-def display_path(path):
-    """Gives the display value for a given path, making it relative to cwd
-    if possible."""
-    path = os.path.normcase(os.path.abspath(path))
-    if path.startswith(os.getcwd() + os.path.sep):
-        path = '.' + path[len(os.getcwd()):]
-    return path
-
-
-def backup_dir(dir, ext='.bak'):
-    """Figure out the name of a directory to back up the given dir to
-    (adding .bak, .bak2, etc)"""
-    n = 1
-    extension = ext
-    while os.path.exists(dir + extension):
-        n += 1
-        extension = ext + str(n)
-    return dir + extension
-
-
-def find_command(cmd, paths=None, pathext=None):
-    """Searches the PATH for the given command and returns its path"""
-    if paths is None:
-        paths = os.environ.get('PATH', '').split(os.pathsep)
-    if isinstance(paths, string_types):
-        paths = [paths]
-    # check if there are funny path extensions for executables, e.g. Windows
-    if pathext is None:
-        pathext = get_pathext()
-    pathext = [ext for ext in pathext.lower().split(os.pathsep)]
-    # don't use extensions if the command ends with one of them
-    if os.path.splitext(cmd)[1].lower() in pathext:
-        pathext = ['']
-    # check if we find the command on PATH
-    for path in paths:
-        # try without extension first
-        cmd_path = os.path.join(path, cmd)
-        for ext in pathext:
-            # then including the extension
-            cmd_path_ext = cmd_path + ext
-            if os.path.isfile(cmd_path_ext):
-                return cmd_path_ext
-        if os.path.isfile(cmd_path):
-            return cmd_path
-    raise BadCommand('Cannot find command %r' % cmd)
-
-
-def get_pathext(default_pathext=None):
-    """Returns the path extensions from environment or a default"""
-    if default_pathext is None:
-        default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
-    pathext = os.environ.get('PATHEXT', default_pathext)
-    return pathext
-
-
-def ask_path_exists(message, options):
-    for action in os.environ.get('PIP_EXISTS_ACTION', ''):
-        if action in options:
-            return action
-    return ask(message, options)
-
-
-def ask(message, options):
-    """Ask the message interactively, with the given possible responses"""
-    while 1:
-        if os.environ.get('PIP_NO_INPUT'):
-            raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
-        response = raw_input(message)
-        response = response.strip().lower()
-        if response not in options:
-            print('Your response (%r) was not one of the expected responses: %s' % (
-                response, ', '.join(options)))
-        else:
-            return response
-
-
-class _Inf(object):
-    """I am bigger than everything!"""
-    def __cmp__(self, a):
-        if self is a:
-            return 0
-        return 1
-
-    def __repr__(self):
-        return 'Inf'
-
-Inf = _Inf()
-del _Inf
-
-
-_normalize_re = re.compile(r'[^a-z]', re.I)
-
-
-def normalize_name(name):
-    return _normalize_re.sub('-', name.lower())
-
-
-def format_size(bytes):
-    if bytes > 1000*1000:
-        return '%.1fMb' % (bytes/1000.0/1000)
-    elif bytes > 10*1000:
-        return '%iKb' % (bytes/1000)
-    elif bytes > 1000:
-        return '%.1fKb' % (bytes/1000.0)
-    else:
-        return '%ibytes' % bytes
-
-
-def is_installable_dir(path):
-    """Return True if `path` is a directory containing a setup.py file."""
-    if not os.path.isdir(path):
-        return False
-    setup_py = os.path.join(path, 'setup.py')
-    if os.path.isfile(setup_py):
-        return True
-    return False
-
-
-def is_svn_page(html):
-    """Returns true if the page appears to be the index page of an svn repository"""
-    return (re.search(r'<title>[^<]*Revision \d+:', html)
-            and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
-
-
-def file_contents(filename):
-    fp = open(filename, 'rb')
-    try:
-        return fp.read().decode('utf-8')
-    finally:
-        fp.close()
-
-
-def split_leading_dir(path):
-    path = str(path)
-    path = path.lstrip('/').lstrip('\\')
-    if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
-                        or '\\' not in path):
-        return path.split('/', 1)
-    elif '\\' in path:
-        return path.split('\\', 1)
-    else:
-        return path, ''
-
-
-def has_leading_dir(paths):
-    """Returns true if all the paths have the same leading path name
-    (i.e., everything is in one subdirectory in an archive)"""
-    common_prefix = None
-    for path in paths:
-        prefix, rest = split_leading_dir(path)
-        if not prefix:
-            return False
-        elif common_prefix is None:
-            common_prefix = prefix
-        elif prefix != common_prefix:
-            return False
-    return True
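
A minimal, standalone sketch of the same "single leading directory" check
used when flattening archives; the helper names below are illustrative, not
this module's API.

    def _leading_dir(name):
        # 'pkg-1.0/setup.py' -> ('pkg-1.0', 'setup.py'); bare names have no prefix.
        name = name.lstrip('/')
        return tuple(name.split('/', 1)) if '/' in name else (name, '')

    def _single_leading_dir(names):
        prefixes = set(_leading_dir(name)[0] for name in names)
        return len(prefixes) == 1 and '' not in prefixes

    print(_single_leading_dir(['pkg-1.0/setup.py', 'pkg-1.0/pkg/__init__.py']))  # True
    print(_single_leading_dir(['setup.py', 'pkg/__init__.py']))                  # False
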
-
-
-def make_path_relative(path, rel_to):
-    """
-    Make a filename relative, where the filename is ``path``, and the result
-    is relative to ``rel_to``
-
-        >>> make_path_relative('/usr/share/something/a-file.pth',
-        ...                    '/usr/share/another-place/src/Directory')
-        '../../../something/a-file.pth'
-        >>> make_path_relative('/usr/share/something/a-file.pth',
-        ...                    '/home/user/src/Directory')
-        '../../../usr/share/something/a-file.pth'
-        >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
-        'a-file.pth'
-    """
-    path_filename = os.path.basename(path)
-    path = os.path.dirname(path)
-    path = os.path.normpath(os.path.abspath(path))
-    rel_to = os.path.normpath(os.path.abspath(rel_to))
-    path_parts = path.strip(os.path.sep).split(os.path.sep)
-    rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
-    while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
-        path_parts.pop(0)
-        rel_to_parts.pop(0)
-    full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
-    if full_parts == ['']:
-        return '.' + os.path.sep
-    return os.path.sep.join(full_parts)
-
-
-def normalize_path(path):
-    """
-    Convert a path to its canonical, case-normalized, absolute version.
-
-    """
-    return os.path.normcase(os.path.realpath(path))
-
-
-def splitext(path):
-    """Like os.path.splitext, but take off .tar too"""
-    base, ext = posixpath.splitext(path)
-    if base.lower().endswith('.tar'):
-        ext = base[-4:] + ext
-        base = base[:-4]
-    return base, ext
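
As a hedged, standalone restatement (so the snippet runs on its own), the
two-step split behaves like this:

    import posixpath

    def tar_aware_splitext(path):
        # Split the extension, then peel off a trailing '.tar' so that
        # 'foo.tar.gz' yields ('foo', '.tar.gz') instead of ('foo.tar', '.gz').
        base, ext = posixpath.splitext(path)
        if base.lower().endswith('.tar'):
            ext = base[-4:] + ext
            base = base[:-4]
        return base, ext

    print(tar_aware_splitext('pip-1.2.1.tar.gz'))  # ('pip-1.2.1', '.tar.gz')
    print(tar_aware_splitext('pip-1.2.1.zip'))     # ('pip-1.2.1', '.zip')
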
-
-
-def renames(old, new):
-    """Like os.renames(), but handles renaming across devices."""
-    # Implementation borrowed from os.renames().
-    head, tail = os.path.split(new)
-    if head and tail and not os.path.exists(head):
-        os.makedirs(head)
-
-    shutil.move(old, new)
-
-    head, tail = os.path.split(old)
-    if head and tail:
-        try:
-            os.removedirs(head)
-        except OSError:
-            pass
-
-
-def is_local(path):
-    """
-    Return True if path is within sys.prefix, if we're running in a virtualenv.
-
-    If we're not in a virtualenv, all paths are considered "local."
-
-    """
-    if not running_under_virtualenv():
-        return True
-    return normalize_path(path).startswith(normalize_path(sys.prefix))
-
-
-def dist_is_local(dist):
-    """
-    Return True if given Distribution object is installed locally
-    (i.e. within current virtualenv).
-
-    Always True if we're not in a virtualenv.
-
-    """
-    return is_local(dist_location(dist))
-
-
-def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python')):
-    """
-    Return a list of installed Distribution objects.
-
-    If ``local_only`` is True (default), only return installations
-    local to the current virtualenv, if in a virtualenv.
-
-    ``skip`` argument is an iterable of lower-case project names to
-    ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also
-    skip virtualenv?]
-
-    """
-    if local_only:
-        local_test = dist_is_local
-    else:
-        local_test = lambda d: True
-    return [d for d in pkg_resources.working_set if local_test(d) and d.key not in skip]
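
A small usage sketch of the same filtering idea with pkg_resources; the skip
set below just restates the default named above.

    import pkg_resources

    skip = set(['setuptools', 'pip', 'python'])
    for dist in pkg_resources.working_set:
        # dist.key is the lower-cased project name used for the skip test.
        if dist.key not in skip:
            print(dist.project_name, dist.version, dist.location)
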
-
-
-def egg_link_path(dist):
-    """
-    Return the path where we'd expect to find a .egg-link file for
-    this distribution. (There doesn't seem to be any metadata in the
-    Distribution object for a develop egg that points back to its
-    .egg-link and easy-install.pth files).
-
-    This won't find a globally-installed develop egg if we're in a
-    virtualenv.
-
-    """
-    return os.path.join(site_packages, dist.project_name) + '.egg-link'
-
-
-def dist_location(dist):
-    """
-    Get the site-packages location of this distribution. Generally
-    this is dist.location, except in the case of develop-installed
-    packages, where dist.location is the source code location, and we
-    want to know where the egg-link file is.
-
-    """
-    egg_link = egg_link_path(dist)
-    if os.path.exists(egg_link):
-        return egg_link
-    return dist.location
-
-
-def get_terminal_size():
-    """Returns a tuple (x, y) representing the width(x) and the height(x)
-    in characters of the terminal window."""
-    def ioctl_GWINSZ(fd):
-        try:
-            import fcntl
-            import termios
-            import struct
-            cr = struct.unpack('hh',
-                               fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
-        except:
-            return None
-        if cr == (0, 0):
-            return None
-        return cr
-    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
-    if not cr:
-        try:
-            fd = os.open(os.ctermid(), os.O_RDONLY)
-            cr = ioctl_GWINSZ(fd)
-            os.close(fd)
-        except:
-            pass
-    if not cr:
-        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
-    return int(cr[1]), int(cr[0])
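
For comparison only: later Python versions expose the same information
through the standard library, with an explicit fallback much like the
LINES/COLUMNS defaults above (this is not what the function above uses).

    import shutil

    # Returns (columns, lines); the fallback pair is used when the size
    # cannot be queried (e.g. output is not attached to a terminal).
    cols, rows = shutil.get_terminal_size(fallback=(80, 25))
    print(cols, rows)
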
-
-
-def unzip_file(filename, location, flatten=True):
-    """Unzip the file (zip file located at filename) to the destination
-    location"""
-    if not os.path.exists(location):
-        os.makedirs(location)
-    zipfp = open(filename, 'rb')
-    try:
-        zip = zipfile.ZipFile(zipfp)
-        leading = has_leading_dir(zip.namelist()) and flatten
-        for name in zip.namelist():
-            data = zip.read(name)
-            fn = name
-            if leading:
-                fn = split_leading_dir(name)[1]
-            fn = os.path.join(location, fn)
-            dir = os.path.dirname(fn)
-            if not os.path.exists(dir):
-                os.makedirs(dir)
-            if fn.endswith('/') or fn.endswith('\\'):
-                # A directory
-                if not os.path.exists(fn):
-                    os.makedirs(fn)
-            else:
-                fp = open(fn, 'wb')
-                try:
-                    fp.write(data)
-                finally:
-                    fp.close()
-    finally:
-        zipfp.close()
-
-
-def untar_file(filename, location):
-    """Untar the file (tar file located at filename) to the destination location"""
-    if not os.path.exists(location):
-        os.makedirs(location)
-    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
-        mode = 'r:gz'
-    elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
-        mode = 'r:bz2'
-    elif filename.lower().endswith('.tar'):
-        mode = 'r'
-    else:
-        logger.warn('Cannot determine compression type for file %s' % filename)
-        mode = 'r:*'
-    tar = tarfile.open(filename, mode)
-    try:
-        # note: python<=2.5 doesn't seem to know about pax headers, filter them
-        leading = has_leading_dir([
-            member.name for member in tar.getmembers()
-            if member.name != 'pax_global_header'
-        ])
-        for member in tar.getmembers():
-            fn = member.name
-            if fn == 'pax_global_header':
-                continue
-            if leading:
-                fn = split_leading_dir(fn)[1]
-            path = os.path.join(location, fn)
-            if member.isdir():
-                if not os.path.exists(path):
-                    os.makedirs(path)
-            elif member.issym():
-                try:
-                    tar._extract_member(member, path)
-                except:
-                    e = sys.exc_info()[1]
-                    # Some corrupt tar files seem to produce this
-                    # (specifically bad symlinks)
-                    logger.warn(
-                        'In the tar file %s the member %s is invalid: %s'
-                        % (filename, member.name, e))
-                    continue
-            else:
-                try:
-                    fp = tar.extractfile(member)
-                except (KeyError, AttributeError):
-                    e = sys.exc_info()[1]
-                    # Some corrupt tar files seem to produce this
-                    # (specifically bad symlinks)
-                    logger.warn(
-                        'In the tar file %s the member %s is invalid: %s'
-                        % (filename, member.name, e))
-                    continue
-                if not os.path.exists(os.path.dirname(path)):
-                    os.makedirs(os.path.dirname(path))
-                destfp = open(path, 'wb')
-                try:
-                    shutil.copyfileobj(fp, destfp)
-                finally:
-                    destfp.close()
-                fp.close()
-    finally:
-        tar.close()
-
-
-def create_download_cache_folder(folder):
-    logger.indent -= 2
-    logger.notify('Creating supposed download cache at %s' % folder)
-    logger.indent += 2
-    os.makedirs(folder)
-
-
-def cache_download(target_file, temp_location, content_type):
-    logger.notify('Storing download in cache at %s' % display_path(target_file))
-    shutil.copyfile(temp_location, target_file)
-    fp = open(target_file+'.content-type', 'w')
-    fp.write(content_type)
-    fp.close()
-    os.unlink(temp_location)
-
-
-def unpack_file(filename, location, content_type, link):
-    filename = os.path.realpath(filename)
-    if (content_type == 'application/zip'
-        or filename.endswith('.zip')
-        or filename.endswith('.pybundle')
-        or zipfile.is_zipfile(filename)):
-        unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
-    elif (content_type == 'application/x-gzip'
-          or tarfile.is_tarfile(filename)
-          or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
-        untar_file(filename, location)
-    elif (content_type and content_type.startswith('text/html')
-          and is_svn_page(file_contents(filename))):
-        # The "file" is actually an svn index page; check the source out instead
-        from pip.vcs.subversion import Subversion
-        Subversion('svn+' + link.url).unpack(location)
-    else:
-        ## FIXME: handle?
-        ## FIXME: magic signatures?
-        logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
-                     % (filename, location, content_type))
-        raise InstallationError('Cannot determine archive format of %s' % location)
-
-
-
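A standalone sketch of the dispatch idea used by unpack_file above: prefer
the declared content type, then sniff the file with the standard library
(the helper name is hypothetical).

    import tarfile
    import zipfile

    def guess_archive_kind(filename, content_type=None):
        # Mirrors the order above: try zip first, then tar, else give up.
        if content_type == 'application/zip' or zipfile.is_zipfile(filename):
            return 'zip'
        if content_type == 'application/x-gzip' or tarfile.is_tarfile(filename):
            return 'tar'
        return 'unknown'
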
diff --git a/vendor/pip-1.2.1/pip/vcs/__init__.py b/vendor/pip-1.2.1/pip/vcs/__init__.py
deleted file mode 100644
index a2137e96a865167dfa14cec1aac3174d58ba9602..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/vcs/__init__.py
+++ /dev/null
@@ -1,244 +0,0 @@
-"""Handles all VCS (version control) support"""
-
-import os
-import shutil
-
-from pip.backwardcompat import urlparse, urllib
-from pip.log import logger
-from pip.util import (display_path, backup_dir, find_command,
-                      ask, rmtree, ask_path_exists)
-
-
-__all__ = ['vcs', 'get_src_requirement']
-
-
-class VcsSupport(object):
-    _registry = {}
-    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
-
-    def __init__(self):
-        # Register more schemes with urlparse for various version control systems
-        urlparse.uses_netloc.extend(self.schemes)
-        urlparse.uses_fragment.extend(self.schemes)
-        super(VcsSupport, self).__init__()
-
-    def __iter__(self):
-        return self._registry.__iter__()
-
-    @property
-    def backends(self):
-        return list(self._registry.values())
-
-    @property
-    def dirnames(self):
-        return [backend.dirname for backend in self.backends]
-
-    @property
-    def all_schemes(self):
-        schemes = []
-        for backend in self.backends:
-            schemes.extend(backend.schemes)
-        return schemes
-
-    def register(self, cls):
-        if not hasattr(cls, 'name'):
-            logger.warn('Cannot register VCS %s' % cls.__name__)
-            return
-        if cls.name not in self._registry:
-            self._registry[cls.name] = cls
-
-    def unregister(self, cls=None, name=None):
-        if name in self._registry:
-            del self._registry[name]
-        elif cls in self._registry.values():
-            del self._registry[cls.name]
-        else:
-            logger.warn('Cannot unregister because no class or name given')
-
-    def get_backend_name(self, location):
-        """
-        Return the name of the version control backend if found at given
-        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
-        """
-        for vc_type in self._registry.values():
-            path = os.path.join(location, vc_type.dirname)
-            if os.path.exists(path):
-                return vc_type.name
-        return None
-
-    def get_backend(self, name):
-        name = name.lower()
-        if name in self._registry:
-            return self._registry[name]
-
-    def get_backend_from_location(self, location):
-        vc_type = self.get_backend_name(location)
-        if vc_type:
-            return self.get_backend(vc_type)
-        return None
-
-
-vcs = VcsSupport()
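
A minimal, self-contained sketch of the registry pattern above: backends
declare a name and are registered against a shared object. All names below
are illustrative, not this module's API.

    class MiniRegistry(object):
        def __init__(self):
            self._registry = {}

        def register(self, cls):
            # Only classes that declare a non-empty name are registered.
            if getattr(cls, 'name', None):
                self._registry[cls.name] = cls

        def get_backend(self, name):
            return self._registry.get(name.lower())

    class FakeGit(object):
        name = 'git'
        dirname = '.git'

    registry = MiniRegistry()
    registry.register(FakeGit)
    print(registry.get_backend('Git'))  # <class '__main__.FakeGit'>
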
-
-
-class VersionControl(object):
-    name = ''
-    dirname = ''
-
-    def __init__(self, url=None, *args, **kwargs):
-        self.url = url
-        self._cmd = None
-        super(VersionControl, self).__init__(*args, **kwargs)
-
-    def _filter(self, line):
-        return (logger.INFO, line)
-
-    def _is_local_repository(self, repo):
-        """
-           posix absolute paths start with os.path.sep,
-           win32 ones start with a drive letter (like c:\\folder)
-        """
-        drive, tail = os.path.splitdrive(repo)
-        return repo.startswith(os.path.sep) or drive
-
-    @property
-    def cmd(self):
-        if self._cmd is not None:
-            return self._cmd
-        command = find_command(self.name)
-        logger.info('Found command %r at %r' % (self.name, command))
-        self._cmd = command
-        return command
-
-    def get_url_rev(self):
-        """
-        Returns the correct repository URL and revision by parsing the given
-        repository URL
-        """
-        url = self.url.split('+', 1)[1]
-        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
-        rev = None
-        if '@' in path:
-            path, rev = path.rsplit('@', 1)
-        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
-        return url, rev
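
A hedged, standalone illustration of the same split, using the Python 3
urllib.parse names instead of the backwardcompat urlparse wrapper; the
function name is made up for this sketch.

    from urllib.parse import urlsplit, urlunsplit

    def split_vcs_url(vcs_url):
        url = vcs_url.split('+', 1)[1]        # drop the 'git+' / 'svn+' prefix
        scheme, netloc, path, query, frag = urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)   # trailing '@rev' selects a revision
        return urlunsplit((scheme, netloc, path, query, '')), rev

    print(split_vcs_url('git+https://example.com/repo.git@v1.0'))
    # ('https://example.com/repo.git', 'v1.0')
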
-
-    def get_info(self, location):
-        """
-        Returns (url, revision), where both are strings
-        """
-        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
-        return self.get_url(location), self.get_revision(location)
-
-    def normalize_url(self, url):
-        """
-        Normalize a URL for comparison by unquoting it and removing any trailing slash.
-        """
-        return urllib.unquote(url).rstrip('/')
-
-    def compare_urls(self, url1, url2):
-        """
-        Compare two repo URLs for identity, ignoring incidental differences.
-        """
-        return (self.normalize_url(url1) == self.normalize_url(url2))
-
-    def parse_vcs_bundle_file(self, content):
-        """
-        Takes the contents of the bundled text file that explains how to
-        revert the stripped-off version control data of the given package,
-        and returns its URL and revision.
-        """
-        raise NotImplementedError
-
-    def obtain(self, dest):
-        """
-        Called when installing or updating an editable package, takes the
-        source path of the checkout.
-        """
-        raise NotImplementedError
-
-    def switch(self, dest, url, rev_options):
-        """
-        Switch the repo at ``dest`` to point to ``url``.
-        """
-        raise NotImplementedError
-
-    def update(self, dest, rev_options):
-        """
-        Update an already-existing repo to the given ``rev_options``.
-        """
-        raise NotImplementedError
-
-    def check_destination(self, dest, url, rev_options, rev_display):
-        """
-        Prepare a location to receive a checkout/clone.
-
-        Return True if the location is ready for (and requires) a
-        checkout/clone, False otherwise.
-        """
-        checkout = True
-        prompt = False
-        if os.path.exists(dest):
-            checkout = False
-            if os.path.exists(os.path.join(dest, self.dirname)):
-                existing_url = self.get_url(dest)
-                if self.compare_urls(existing_url, url):
-                    logger.info('%s in %s exists, and has correct URL (%s)' %
-                                (self.repo_name.title(), display_path(dest),
-                                 url))
-                    logger.notify('Updating %s %s%s' %
-                                  (display_path(dest), self.repo_name,
-                                   rev_display))
-                    self.update(dest, rev_options)
-                else:
-                    logger.warn('%s %s in %s exists with URL %s' %
-                                (self.name, self.repo_name,
-                                 display_path(dest), existing_url))
-                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
-                              ('s', 'i', 'w', 'b'))
-            else:
-                logger.warn('Directory %s already exists, '
-                            'and is not a %s %s.' %
-                            (dest, self.name, self.repo_name))
-                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
-        if prompt:
-            logger.warn('The plan is to install the %s repository %s' %
-                        (self.name, url))
-            response = ask_path_exists('What to do?  %s' % prompt[0],
-                                       prompt[1])
-
-            if response == 's':
-                logger.notify('Switching %s %s to %s%s' %
-                              (self.repo_name, display_path(dest), url,
-                               rev_display))
-                self.switch(dest, url, rev_options)
-            elif response == 'i':
-                # do nothing
-                pass
-            elif response == 'w':
-                logger.warn('Deleting %s' % display_path(dest))
-                rmtree(dest)
-                checkout = True
-            elif response == 'b':
-                dest_dir = backup_dir(dest)
-                logger.warn('Backing up %s to %s'
-                            % (display_path(dest), dest_dir))
-                shutil.move(dest, dest_dir)
-                checkout = True
-        return checkout
-
-    def unpack(self, location):
-        if os.path.exists(location):
-            rmtree(location)
-        self.obtain(location)
-
-    def get_src_requirement(self, dist, location, find_tags=False):
-        raise NotImplementedError
-
-
-def get_src_requirement(dist, location, find_tags):
-    version_control = vcs.get_backend_from_location(location)
-    if version_control:
-        return version_control().get_src_requirement(dist, location, find_tags)
-    logger.warn('cannot determine version of editable source in %s (is not an SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
-    return dist.as_requirement()
diff --git a/vendor/pip-1.2.1/pip/vcs/bazaar.py b/vendor/pip-1.2.1/pip/vcs/bazaar.py
deleted file mode 100644
index 5d52777714cc610fad635752141268576ba8fe88..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/vcs/bazaar.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import os
-import tempfile
-import re
-from pip import call_subprocess
-from pip.backwardcompat import urlparse
-from pip.log import logger
-from pip.util import rmtree, display_path
-from pip.vcs import vcs, VersionControl
-from pip.download import path_to_url2
-
-
-class Bazaar(VersionControl):
-    name = 'bzr'
-    dirname = '.bzr'
-    repo_name = 'branch'
-    bundle_file = 'bzr-branch.txt'
-    schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', 'bzr+lp')
-    guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
-             'bzr branch -r %(rev)s %(url)s .\n')
-
-    def __init__(self, url=None, *args, **kwargs):
-        super(Bazaar, self).__init__(url, *args, **kwargs)
-        urlparse.non_hierarchical.extend(['lp'])
-        urlparse.uses_fragment.extend(['lp'])
-
-    def parse_vcs_bundle_file(self, content):
-        url = rev = None
-        for line in content.splitlines():
-            if not line.strip() or line.strip().startswith('#'):
-                continue
-            match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
-            if match:
-                rev = match.group(1).strip()
-                url = line[match.end():].strip().split(None, 1)[0]
-            if url and rev:
-                return url, rev
-        return None, None
-
-    def export(self, location):
-        """Export the Bazaar repository at the url to the destination location"""
-        temp_dir = tempfile.mkdtemp('-export', 'pip-')
-        self.unpack(temp_dir)
-        if os.path.exists(location):
-            # Remove the location to make sure Bazaar can export it correctly
-            rmtree(location)
-        try:
-            call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
-                            filter_stdout=self._filter, show_stdout=False)
-        finally:
-            rmtree(temp_dir)
-
-    def switch(self, dest, url, rev_options):
-        call_subprocess([self.cmd, 'switch', url], cwd=dest)
-
-    def update(self, dest, rev_options):
-        call_subprocess(
-            [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)
-
-    def obtain(self, dest):
-        url, rev = self.get_url_rev()
-        if rev:
-            rev_options = ['-r', rev]
-            rev_display = ' (to revision %s)' % rev
-        else:
-            rev_options = []
-            rev_display = ''
-        if self.check_destination(dest, url, rev_options, rev_display):
-            logger.notify('Checking out %s%s to %s'
-                          % (url, rev_display, display_path(dest)))
-            call_subprocess(
-                [self.cmd, 'branch', '-q'] + rev_options + [url, dest])
-
-    def get_url_rev(self):
-        # hotfix: get_url_rev() strips the 'bzr+' prefix from bzr+ssh:// URLs; re-add it
-        url, rev = super(Bazaar, self).get_url_rev()
-        if url.startswith('ssh://'):
-            url = 'bzr+' + url
-        return url, rev
-
-    def get_url(self, location):
-        urls = call_subprocess(
-            [self.cmd, 'info'], show_stdout=False, cwd=location)
-        for line in urls.splitlines():
-            line = line.strip()
-            for x in ('checkout of branch: ',
-                      'parent branch: '):
-                if line.startswith(x):
-                    repo = line.split(x)[1]
-                    if self._is_local_repository(repo):
-                        return path_to_url2(repo)
-                    return repo
-        return None
-
-    def get_revision(self, location):
-        revision = call_subprocess(
-            [self.cmd, 'revno'], show_stdout=False, cwd=location)
-        return revision.splitlines()[-1]
-
-    def get_tag_revs(self, location):
-        tags = call_subprocess(
-            [self.cmd, 'tags'], show_stdout=False, cwd=location)
-        tag_revs = []
-        for line in tags.splitlines():
-            tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
-            if tags_match:
-                tag = tags_match.group(1)
-                rev = tags_match.group(2)
-                tag_revs.append((rev.strip(), tag.strip()))
-        return dict(tag_revs)
-
-    def get_src_requirement(self, dist, location, find_tags):
-        repo = self.get_url(location)
-        if not repo:
-            return None
-        if not repo.lower().startswith('bzr:'):
-            repo = 'bzr+' + repo
-        egg_project_name = dist.egg_name().split('-', 1)[0]
-        current_rev = self.get_revision(location)
-        tag_revs = self.get_tag_revs(location)
-
-        if current_rev in tag_revs:
-            # It's a tag
-            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
-        else:
-            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
-        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
-
-
-vcs.register(Bazaar)
diff --git a/vendor/pip-1.2.1/pip/vcs/git.py b/vendor/pip-1.2.1/pip/vcs/git.py
deleted file mode 100644
index ecaf19f5076963556521cf9f2a16146e007c7b34..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/vcs/git.py
+++ /dev/null
@@ -1,206 +0,0 @@
-import tempfile
-import re
-from pip import call_subprocess
-from pip.util import display_path, rmtree
-from pip.vcs import vcs, VersionControl
-from pip.log import logger
-from pip.backwardcompat import url2pathname, urlparse
-urlsplit = urlparse.urlsplit
-urlunsplit = urlparse.urlunsplit
-
-
-class Git(VersionControl):
-    name = 'git'
-    dirname = '.git'
-    repo_name = 'clone'
-    schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file')
-    bundle_file = 'git-clone.txt'
-    guide = ('# This was a Git repo; to make it a repo again run:\n'
-        'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
-
-    def __init__(self, url=None, *args, **kwargs):
-
-        # Works around an apparent Git bug
-        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
-        if url:
-            scheme, netloc, path, query, fragment = urlsplit(url)
-            if scheme.endswith('file'):
-                initial_slashes = path[:-len(path.lstrip('/'))]
-                newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/')
-                after_plus = scheme.find('+') + 1
-                url = scheme[:after_plus] + urlunsplit(
-                    (scheme[after_plus:], netloc, newpath, query, fragment))
-
-        super(Git, self).__init__(url, *args, **kwargs)
-
-    def parse_vcs_bundle_file(self, content):
-        url = rev = None
-        for line in content.splitlines():
-            if not line.strip() or line.strip().startswith('#'):
-                continue
-            url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
-            if url_match:
-                url = url_match.group(1).strip()
-            rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
-            if rev_match:
-                rev = rev_match.group(1).strip()
-            if url and rev:
-                return url, rev
-        return None, None
-
-    def export(self, location):
-        """Export the Git repository at the url to the destination location"""
-        temp_dir = tempfile.mkdtemp('-export', 'pip-')
-        self.unpack(temp_dir)
-        try:
-            if not location.endswith('/'):
-                location = location + '/'
-            call_subprocess(
-                [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
-                filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
-        finally:
-            rmtree(temp_dir)
-
-    def check_rev_options(self, rev, dest, rev_options):
-        """Check the revision options before checkout to compensate that tags
-        and branches may need origin/ as a prefix.
-        Returns the SHA1 of the branch or tag if found.
-        """
-        revisions = self.get_tag_revs(dest)
-        revisions.update(self.get_branch_revs(dest))
-
-        origin_rev = 'origin/%s' % rev
-        if origin_rev in revisions:
-            # remote branch
-            return [revisions[origin_rev]]
-        elif rev in revisions:
-            # a local tag or branch name
-            return [revisions[rev]]
-        else:
-            logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
-            return rev_options
-
-    def switch(self, dest, url, rev_options):
-        call_subprocess(
-            [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
-        call_subprocess(
-            [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
-
-    def update(self, dest, rev_options):
-        # First fetch changes from the default remote
-        call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
-        # Then reset to the wanted revision (maybe even origin/master)
-        if rev_options:
-            rev_options = self.check_rev_options(rev_options[0], dest, rev_options)
-        call_subprocess([self.cmd, 'reset', '--hard', '-q'] + rev_options, cwd=dest)
-
-    def obtain(self, dest):
-        url, rev = self.get_url_rev()
-        if rev:
-            rev_options = [rev]
-            rev_display = ' (to %s)' % rev
-        else:
-            rev_options = ['origin/master']
-            rev_display = ''
-        if self.check_destination(dest, url, rev_options, rev_display):
-            logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
-            call_subprocess([self.cmd, 'clone', '-q', url, dest])
-            if rev:
-                rev_options = self.check_rev_options(rev, dest, rev_options)
-                # Only do a checkout if rev_options differs from HEAD
-                if not self.get_revision(dest).startswith(rev_options[0]):
-                    call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
-
-    def get_url(self, location):
-        url = call_subprocess(
-            [self.cmd, 'config', 'remote.origin.url'],
-            show_stdout=False, cwd=location)
-        return url.strip()
-
-    def get_revision(self, location):
-        current_rev = call_subprocess(
-            [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
-        return current_rev.strip()
-
-    def get_tag_revs(self, location):
-        tags = self._get_all_tag_names(location)
-        tag_revs = {}
-        for line in tags.splitlines():
-            tag = line.strip()
-            rev = self._get_revision_from_rev_parse(tag, location)
-            tag_revs[tag] = rev.strip()
-        return tag_revs
-
-    def get_branch_revs(self, location):
-        branches = self._get_all_branch_names(location)
-        branch_revs = {}
-        for line in branches.splitlines():
-            if '(no branch)' in line:
-                continue
-            line = line.split('->')[0].strip()
-            # actual branch case
-            branch = "".join(b for b in line.split() if b != '*')
-            rev = self._get_revision_from_rev_parse(branch, location)
-            branch_revs[branch] = rev.strip()
-        return branch_revs
-
-    def get_src_requirement(self, dist, location, find_tags):
-        repo = self.get_url(location)
-        if not repo:
-            return None
-        if not repo.lower().startswith('git:'):
-            repo = 'git+' + repo
-        egg_project_name = dist.egg_name().split('-', 1)[0]
-        current_rev = self.get_revision(location)
-        tag_revs = self.get_tag_revs(location)
-        branch_revs = self.get_branch_revs(location)
-
-        if current_rev in tag_revs:
-            # It's a tag
-            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
-        elif (current_rev in branch_revs and
-              branch_revs[current_rev] != 'origin/master'):
-            # It's the head of a branch
-            full_egg_name = '%s-%s' % (egg_project_name,
-                                       branch_revs[current_rev].replace('origin/', ''))
-        else:
-            full_egg_name = '%s-dev' % egg_project_name
-
-        return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
-
-    def get_url_rev(self):
-        """
-        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
-        That's required because although they use SSH they sometimes don't
-        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
-        parsing. Hence we remove it again afterwards and return it as a stub.
-        """
-        if '://' not in self.url:
-            assert 'file:' not in self.url
-            self.url = self.url.replace('git+', 'git+ssh://')
-            url, rev = super(Git, self).get_url_rev()
-            url = url.replace('ssh://', '')
-        else:
-            url, rev = super(Git, self).get_url_rev()
-
-        return url, rev
-
-    def _get_all_tag_names(self, location):
-        return call_subprocess([self.cmd, 'tag', '-l'],
-                               show_stdout=False,
-                               raise_on_returncode=False,
-                               cwd=location)
-
-    def _get_all_branch_names(self, location):
-        remote_branches = call_subprocess([self.cmd, 'branch', '-r'],
-                                          show_stdout=False, cwd=location)
-        local_branches = call_subprocess([self.cmd, 'branch', '-l'],
-                                         show_stdout=False, cwd=location)
-        return remote_branches + local_branches
-
-    def _get_revision_from_rev_parse(self, name, location):
-        return call_subprocess([self.cmd, 'rev-parse', name],
-                               show_stdout=False, cwd=location)
-
-
-vcs.register(Git)
diff --git a/vendor/pip-1.2.1/pip/vcs/mercurial.py b/vendor/pip-1.2.1/pip/vcs/mercurial.py
deleted file mode 100644
index fbafccc5f828e992ad9e690dc3b379bf661b4586..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/vcs/mercurial.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import os
-import tempfile
-import re
-import sys
-from pip import call_subprocess
-from pip.util import display_path, rmtree
-from pip.log import logger
-from pip.vcs import vcs, VersionControl
-from pip.download import path_to_url2
-from pip.backwardcompat import ConfigParser
-
-
-class Mercurial(VersionControl):
-    name = 'hg'
-    dirname = '.hg'
-    repo_name = 'clone'
-    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
-    bundle_file = 'hg-clone.txt'
-    guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
-            'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
-
-    def parse_vcs_bundle_file(self, content):
-        url = rev = None
-        for line in content.splitlines():
-            if not line.strip() or line.strip().startswith('#'):
-                continue
-            url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
-            if url_match:
-                url = url_match.group(1).strip()
-            rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
-            if rev_match:
-                rev = rev_match.group(1).strip()
-            if url and rev:
-                return url, rev
-        return None, None
-
-    def export(self, location):
-        """Export the Hg repository at the url to the destination location"""
-        temp_dir = tempfile.mkdtemp('-export', 'pip-')
-        self.unpack(temp_dir)
-        try:
-            call_subprocess(
-                [self.cmd, 'archive', location],
-                filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
-        finally:
-            rmtree(temp_dir)
-
-    def switch(self, dest, url, rev_options):
-        repo_config = os.path.join(dest, self.dirname, 'hgrc')
-        config = ConfigParser.SafeConfigParser()
-        try:
-            config.read(repo_config)
-            config.set('paths', 'default', url)
-            config_file = open(repo_config, 'w')
-            config.write(config_file)
-            config_file.close()
-        except (OSError, ConfigParser.NoSectionError):
-            e = sys.exc_info()[1]
-            logger.warn(
-                'Could not switch Mercurial repository to %s: %s'
-                % (url, e))
-        else:
-            call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
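
The hgrc rewrite above boils down to a ConfigParser round-trip; here is a
standalone sketch using the Python 3 configparser spelling (the file path is
hypothetical).

    import configparser

    config = configparser.ConfigParser()
    config.read('example.hgrc')               # hypothetical path for this sketch
    if not config.has_section('paths'):
        config.add_section('paths')
    config.set('paths', 'default', 'https://hg.example.com/repo')
    with open('example.hgrc', 'w') as fh:
        config.write(fh)
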
-
-    def update(self, dest, rev_options):
-        call_subprocess([self.cmd, 'pull', '-q'], cwd=dest)
-        call_subprocess(
-            [self.cmd, 'update', '-q'] + rev_options, cwd=dest)
-
-    def obtain(self, dest):
-        url, rev = self.get_url_rev()
-        if rev:
-            rev_options = [rev]
-            rev_display = ' (to revision %s)' % rev
-        else:
-            rev_options = []
-            rev_display = ''
-        if self.check_destination(dest, url, rev_options, rev_display):
-            logger.notify('Cloning hg %s%s to %s'
-                          % (url, rev_display, display_path(dest)))
-            call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
-            call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
-
-    def get_url(self, location):
-        url = call_subprocess(
-            [self.cmd, 'showconfig', 'paths.default'],
-            show_stdout=False, cwd=location).strip()
-        if self._is_local_repository(url):
-            url = path_to_url2(url)
-        return url.strip()
-
-    def get_tag_revs(self, location):
-        tags = call_subprocess(
-            [self.cmd, 'tags'], show_stdout=False, cwd=location)
-        tag_revs = []
-        for line in tags.splitlines():
-            tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
-            if tags_match:
-                tag = tags_match.group(1)
-                rev = tags_match.group(2)
-                if "tip" != tag:
-                    tag_revs.append((rev.strip(), tag.strip()))
-        return dict(tag_revs)
-
-    def get_branch_revs(self, location):
-        branches = call_subprocess(
-            [self.cmd, 'branches'], show_stdout=False, cwd=location)
-        branch_revs = []
-        for line in branches.splitlines():
-            branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
-            if branches_match:
-                branch = branches_match.group(1)
-                rev = branches_match.group(2)
-                if "default" != branch:
-                    branch_revs.append((rev.strip(), branch.strip()))
-        return dict(branch_revs)
-
-    def get_revision(self, location):
-        current_revision = call_subprocess(
-            [self.cmd, 'parents', '--template={rev}'],
-            show_stdout=False, cwd=location).strip()
-        return current_revision
-
-    def get_revision_hash(self, location):
-        current_rev_hash = call_subprocess(
-            [self.cmd, 'parents', '--template={node}'],
-            show_stdout=False, cwd=location).strip()
-        return current_rev_hash
-
-    def get_src_requirement(self, dist, location, find_tags):
-        repo = self.get_url(location)
-        if not repo:
-            return None
-        if not repo.lower().startswith('hg:'):
-            repo = 'hg+' + repo
-        egg_project_name = dist.egg_name().split('-', 1)[0]
-        current_rev = self.get_revision(location)
-        current_rev_hash = self.get_revision_hash(location)
-        tag_revs = self.get_tag_revs(location)
-        branch_revs = self.get_branch_revs(location)
-        if current_rev in tag_revs:
-            # It's a tag
-            full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
-        elif current_rev in branch_revs:
-            # It's the tip of a branch
-            full_egg_name = '%s-%s' % (egg_project_name, branch_revs[current_rev])
-        else:
-            full_egg_name = '%s-dev' % egg_project_name
-        return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
-
-vcs.register(Mercurial)
diff --git a/vendor/pip-1.2.1/pip/vcs/subversion.py b/vendor/pip-1.2.1/pip/vcs/subversion.py
deleted file mode 100644
index f54eee664db19dc8a751446a50e7d0de368f3de2..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/pip/vcs/subversion.py
+++ /dev/null
@@ -1,272 +0,0 @@
-import os
-import re
-from pip.backwardcompat import urlparse
-from pip import call_subprocess, InstallationError
-from pip.index import Link
-from pip.util import rmtree, display_path
-from pip.log import logger
-from pip.vcs import vcs, VersionControl
-
-_svn_xml_url_re = re.compile(r'url="([^"]+)"')
-_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
-_svn_url_re = re.compile(r'URL: (.+)')
-_svn_revision_re = re.compile(r'Revision: (.+)')
-_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
-_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
-
-
-class Subversion(VersionControl):
-    name = 'svn'
-    dirname = '.svn'
-    repo_name = 'checkout'
-    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
-    bundle_file = 'svn-checkout.txt'
-    guide = ('# This was an svn checkout; to make it a checkout again run:\n'
-            'svn checkout --force -r %(rev)s %(url)s .\n')
-
-    def get_info(self, location):
-        """Returns (url, revision), where both are strings"""
-        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
-        output = call_subprocess(
-            [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
-        match = _svn_url_re.search(output)
-        if not match:
-            logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
-            logger.info('Output that cannot be parsed: \n%s' % output)
-            return None, None
-        url = match.group(1).strip()
-        match = _svn_revision_re.search(output)
-        if not match:
-            logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
-            logger.info('Output that cannot be parsed: \n%s' % output)
-            return url, None
-        return url, match.group(1)
-
-    def parse_vcs_bundle_file(self, content):
-        for line in content.splitlines():
-            if not line.strip() or line.strip().startswith('#'):
-                continue
-            match = re.search(r'^-r\s*([^ ])?', line)
-            if not match:
-                return None, None
-            rev = match.group(1)
-            rest = line[match.end():].strip().split(None, 1)[0]
-            return rest, rev
-        return None, None
-
-    def export(self, location):
-        """Export the svn repository at the url to the destination location"""
-        url, rev = self.get_url_rev()
-        rev_options = get_rev_options(url, rev)
-        logger.notify('Exporting svn repository %s to %s' % (url, location))
-        logger.indent += 2
-        try:
-            if os.path.exists(location):
-                # Subversion doesn't like to check out over an existing directory
-                # --force fixes this, but was only added in svn 1.5
-                rmtree(location)
-            call_subprocess(
-                [self.cmd, 'export'] + rev_options + [url, location],
-                filter_stdout=self._filter, show_stdout=False)
-        finally:
-            logger.indent -= 2
-
-    def switch(self, dest, url, rev_options):
-        call_subprocess(
-            [self.cmd, 'switch'] + rev_options + [url, dest])
-
-    def update(self, dest, rev_options):
-        call_subprocess(
-            [self.cmd, 'update'] + rev_options + [dest])
-
-    def obtain(self, dest):
-        url, rev = self.get_url_rev()
-        rev_options = get_rev_options(url, rev)
-        if rev:
-            rev_display = ' (to revision %s)' % rev
-        else:
-            rev_display = ''
-        if self.check_destination(dest, url, rev_options, rev_display):
-            logger.notify('Checking out %s%s to %s'
-                          % (url, rev_display, display_path(dest)))
-            call_subprocess(
-                [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
-
-    def get_location(self, dist, dependency_links):
-        for url in dependency_links:
-            egg_fragment = Link(url).egg_fragment
-            if not egg_fragment:
-                continue
-            if '-' in egg_fragment:
-                ## FIXME: will this work when a package has - in the name?
-                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
-            else:
-                key = egg_fragment
-            if key == dist.key:
-                return url.split('#', 1)[0]
-        return None
-
-    def get_revision(self, location):
-        """
-        Return the maximum revision for all files under a given location
-        """
-        # Note: taken from setuptools.command.egg_info
-        revision = 0
-
-        for base, dirs, files in os.walk(location):
-            if self.dirname not in dirs:
-                dirs[:] = []
-                continue    # no sense walking uncontrolled subdirs
-            dirs.remove(self.dirname)
-            entries_fn = os.path.join(base, self.dirname, 'entries')
-            if not os.path.exists(entries_fn):
-                ## FIXME: should we warn?
-                continue
-
-            dirurl, localrev = self._get_svn_url_rev(base)
-
-            if base == location:
-                base_url = dirurl+'/'   # save the root url
-            elif not dirurl or not dirurl.startswith(base_url):
-                dirs[:] = []
-                continue    # not part of the same svn tree, skip it
-            revision = max(revision, localrev)
-        return revision
-
-    def get_url_rev(self):
-        # hotfix: get_url_rev() strips the 'svn+' prefix from svn+ssh:// URLs; re-add it
-        url, rev = super(Subversion, self).get_url_rev()
-        if url.startswith('ssh://'):
-            url = 'svn+' + url
-        return url, rev
-
-    def get_url(self, location):
-        # In cases where the source is in a subdirectory, not alongside setup.py,
-        # we have to walk up from the location until we find a real setup.py
-        orig_location = location
-        while not os.path.exists(os.path.join(location, 'setup.py')):
-            last_location = location
-            location = os.path.dirname(location)
-            if location == last_location:
-                # We've traversed up to the root of the filesystem without finding setup.py
-                logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
-                            % orig_location)
-                return None
-
-        return self._get_svn_url_rev(location)[0]
-
-    def _get_svn_url_rev(self, location):
-        f = open(os.path.join(location, self.dirname, 'entries'))
-        data = f.read()
-        f.close()
-        if data.startswith('8') or data.startswith('9') or data.startswith('10'):
-            data = list(map(str.splitlines, data.split('\n\x0c\n')))
-            del data[0][0]  # get rid of the '8'
-            url = data[0][3]
-            revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
-        elif data.startswith('<?xml'):
-            match = _svn_xml_url_re.search(data)
-            if not match:
-                raise ValueError('Badly formatted data: %r' % data)
-            url = match.group(1)    # get repository URL
-            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
-        else:
-            try:
-                # subversion >= 1.7
-                xml = call_subprocess([self.cmd, 'info', '--xml', location], show_stdout=False)
-                url = _svn_info_xml_url_re.search(xml).group(1)
-                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
-            except InstallationError:
-                url, revs = None, []
-
-        if revs:
-            rev = max(revs)
-        else:
-            rev = 0
-
-        return url, rev
-
-    def get_tag_revs(self, svn_tag_url):
-        stdout = call_subprocess(
-            [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
-        results = []
-        for line in stdout.splitlines():
-            parts = line.split()
-            rev = int(parts[0])
-            tag = parts[-1].strip('/')
-            results.append((tag, rev))
-        return results
-
-    def find_tag_match(self, rev, tag_revs):
-        best_match_rev = None
-        best_tag = None
-        for tag, tag_rev in tag_revs:
-            if (tag_rev > rev and
-                (best_match_rev is None or best_match_rev > tag_rev)):
-                # FIXME: Is best_match > tag_rev really possible?
-                # or is it a sign something is wacky?
-                best_match_rev = tag_rev
-                best_tag = tag
-        return best_tag
-
-    def get_src_requirement(self, dist, location, find_tags=False):
-        repo = self.get_url(location)
-        if repo is None:
-            return None
-        parts = repo.split('/')
-        ## FIXME: why not project name?
-        egg_project_name = dist.egg_name().split('-', 1)[0]
-        rev = self.get_revision(location)
-        if parts[-2] in ('tags', 'tag'):
-            # It's a tag, perfect!
-            full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
-        elif parts[-2] in ('branches', 'branch'):
-            # It's a branch :(
-            full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
-        elif parts[-1] == 'trunk':
-            # Trunk :-/
-            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
-            if find_tags:
-                tag_url = '/'.join(parts[:-1]) + '/tags'
-                tag_revs = self.get_tag_revs(tag_url)
-                match = self.find_tag_match(rev, tag_revs)
-                if match:
-                    logger.notify('trunk checkout %s seems to be equivalent to tag %s' % (display_path(location), match))
-                    repo = '%s/%s' % (tag_url, match)
-                    full_egg_name = '%s-%s' % (egg_project_name, match)
-        else:
-            # Don't know what it is
-            logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
-            full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
-        return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
-
-
-def get_rev_options(url, rev):
-    if rev:
-        rev_options = ['-r', rev]
-    else:
-        rev_options = []
-
-    r = urlparse.urlsplit(url)
-    if hasattr(r, 'username'):
-        # >= Python-2.5
-        username, password = r.username, r.password
-    else:
-        netloc = r[1]
-        if '@' in netloc:
-            auth = netloc.split('@')[0]
-            if ':' in auth:
-                username, password = auth.split(':', 1)
-            else:
-                username, password = auth, None
-        else:
-            username, password = None, None
-
-    if username:
-        rev_options += ['--username', username]
-    if password:
-        rev_options += ['--password', password]
-    return rev_options
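
A standalone illustration of the credential extraction, using the Python 3
urlsplit attributes that the ">= Python-2.5" branch above relies on; the URL
is made up.

    from urllib.parse import urlsplit

    parts = urlsplit('svn+ssh://user:secret@svn.example.com/repo/trunk')
    extra = []
    if parts.username:
        extra += ['--username', parts.username]
    if parts.password:
        extra += ['--password', parts.password]
    print(extra)  # ['--username', 'user', '--password', 'secret']
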
-
-
-vcs.register(Subversion)
diff --git a/vendor/pip-1.2.1/setup.cfg b/vendor/pip-1.2.1/setup.cfg
deleted file mode 100644
index ce26f6afe4d99429529d4fd9d5c0600c7fec4d3e..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[nosetests]
-where=tests
diff --git a/vendor/pip-1.2.1/setup.py b/vendor/pip-1.2.1/setup.py
deleted file mode 100644
index 3227621177c6f982c1904099d24ac0bb5c20be54..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/setup.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import sys
-import os
-from setuptools import setup
-
-# If you change this version, change it also in docs/conf.py
-version = "1.1"
-
-doc_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "docs")
-index_filename = os.path.join(doc_dir, "index.txt")
-news_filename = os.path.join(doc_dir, "news.txt")
-long_description = """
-
-The main website for pip is `www.pip-installer.org
-<http://www.pip-installer.org>`_.  You can also install
-the `in-development version <https://github.com/pypa/pip/tarball/develop#egg=pip-dev>`_
-of pip with ``easy_install pip==dev``.
-
-"""
-f = open(index_filename)
-# remove the toctree from sphinx index, as it breaks long_description
-parts = f.read().split("split here", 2)
-long_description = parts[0] + long_description + parts[2]
-f.close()
-f = open(news_filename)
-long_description += "\n\n" + f.read()
-f.close()
-
-setup(name="pip",
-      version=version,
-      description="pip installs packages. Python packages. An easy_install replacement",
-      long_description=long_description,
-      classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: MIT License',
-        'Topic :: Software Development :: Build Tools',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.4',
-        'Programming Language :: Python :: 2.5',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.1',
-        'Programming Language :: Python :: 3.2',
-      ],
-      keywords='easy_install distutils setuptools egg virtualenv',
-      author='The pip developers',
-      author_email='python-virtualenv@groups.google.com',
-      url='http://www.pip-installer.org',
-      license='MIT',
-      packages=['pip', 'pip.commands', 'pip.vcs'],
-      entry_points=dict(console_scripts=['pip=pip:main', 'pip-%s=pip:main' % sys.version[:3]]),
-      test_suite='nose.collector',
-      tests_require=['nose', 'virtualenv>=1.7', 'scripttest>=1.1.1', 'mock'],
-      zip_safe=False)
diff --git a/vendor/pip-1.2.1/tests/__init__.py b/vendor/pip-1.2.1/tests/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/vendor/pip-1.2.1/tests/in dex/FSPkg/FSPkg-0.1dev.tar.gz b/vendor/pip-1.2.1/tests/in dex/FSPkg/FSPkg-0.1dev.tar.gz
deleted file mode 100644
index 7fa7c10c2398d841d046838fa08b19188951cb90..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1/tests/in dex/FSPkg/FSPkg-0.1dev.tar.gz and /dev/null differ
diff --git a/vendor/pip-1.2.1/tests/in dex/FSPkg/index.html b/vendor/pip-1.2.1/tests/in dex/FSPkg/index.html
deleted file mode 100644
index cf4f404e32f89f277f1f0c0de8fbf9fb9412cf45..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/in dex/FSPkg/index.html	
+++ /dev/null
@@ -1,3 +0,0 @@
-<html><head><title>Links for FSPkg</title></head><body><h1>Links for FSPkg</h1><a href="./FSPkg-0.1dev.tar.gz#md5=ba6e46bed32c5b6d20f974d7d889bdb2">FSPkg-0.1dev.tar.gz</a><br/>
-<a href="file://../../packages/FSPkg">Source</a><br/> 
-</body></html>
diff --git a/vendor/pip-1.2.1/tests/in dex/README.txt b/vendor/pip-1.2.1/tests/in dex/README.txt
deleted file mode 100644
index d26726fe9bb91b2d2a468f3b89a57d38c242c130..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/in dex/README.txt	
+++ /dev/null
@@ -1,2 +0,0 @@
-This directory has the odd space in its name in order to test urlquoting and
-dequoting of file:// scheme index URLs.
diff --git a/vendor/pip-1.2.1/tests/local_repos.py b/vendor/pip-1.2.1/tests/local_repos.py
deleted file mode 100644
index 4a1a171be03090bc0e5e6b50448cdf26f614857d..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/local_repos.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import os
-import subprocess
-from pip.vcs import subversion, git, bazaar, mercurial
-from pip.backwardcompat import urlretrieve
-from tests.test_pip import path_to_url
-from tests.pypi_server import PyPIProxy
-
-
-if hasattr(subprocess, "check_call"):
-    subprocess_call = subprocess.check_call
-else:
-    subprocess_call = subprocess.call
-
-
-def _create_initools_repository():
-    subprocess_call('svnadmin create INITools'.split(), cwd=_get_vcs_folder())
-
-
-def _dump_initools_repository():
-    filename, _ = urlretrieve('http://bitbucket.org/hltbra/pip-initools-dump/raw/8b55c908a320/INITools_modified.dump')
-    initools_folder = os.path.join(_get_vcs_folder(), 'INITools')
-    devnull = open(os.devnull, 'w')
-    dump = open(filename)
-    subprocess_call(['svnadmin', 'load', initools_folder], stdin=dump, stdout=devnull)
-    dump.close()
-    devnull.close()
-    os.remove(filename)
-
-
-def _create_svn_repository_for_initools():
-    tests_cache = _get_vcs_folder()
-    if not os.path.exists(os.path.join(tests_cache, 'INITools')):
-        _create_initools_repository()
-        _dump_initools_repository()
-
-
-def _get_vcs_folder():
-    folder_name = PyPIProxy.CACHE_PATH
-    if not os.path.exists(folder_name):
-        os.mkdir(folder_name)
-    return folder_name
-
-
-def _get_vcs_and_checkout_url(remote_repository):
-    tests_cache = _get_vcs_folder()
-    vcs_classes = {'svn': subversion.Subversion,
-                   'git': git.Git,
-                   'bzr': bazaar.Bazaar,
-                   'hg': mercurial.Mercurial}
-    default_vcs = 'svn'
-    if '+' not in remote_repository:
-        remote_repository = '%s+%s' % (default_vcs, remote_repository)
-    vcs, repository_path = remote_repository.split('+', 1)
-    vcs_class = vcs_classes[vcs]
-    branch = ''
-    if vcs == 'svn':
-        branch = os.path.basename(remote_repository)
-        repository_name = os.path.basename(remote_repository[:-len(branch)-1]) # remove the slash
-    else:
-        repository_name = os.path.basename(remote_repository)
-
-    destination_path = os.path.join(tests_cache, repository_name)
-    if not os.path.exists(destination_path):
-        vcs_class(remote_repository).obtain(destination_path)
-    return '%s+%s' % (vcs, path_to_url('/'.join([tests_cache, repository_name, branch])))
-
-
-def local_checkout(remote_repo):
-    if remote_repo.startswith('svn'):
-        _create_svn_repository_for_initools()
-    return _get_vcs_and_checkout_url(remote_repo)
-
-
-def local_repo(remote_repo):
-    return local_checkout(remote_repo).split('+', 1)[1]
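local_checkout above mirrors a remote repository into the tests cache the
first time it is asked for, then returns a VCS URL that points at the local
copy; local_repo strips the VCS prefix from that URL. A rough usage sketch
(the exact local location depends on where the cache folder lives):

    from tests.local_repos import local_checkout, local_repo

    remote = 'git+http://github.com/pypa/pip-test-package.git'
    url = local_checkout(remote)     # clones into tests_cache/ on first use
    vcs, location = url.split('+', 1)
    assert vcs == 'git'              # the VCS prefix is preserved

    # local_repo returns only the local location, without the prefix.
    assert local_repo(remote) == location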
diff --git a/vendor/pip-1.2.1/tests/packages/BrokenEmitsUTF8/broken.py b/vendor/pip-1.2.1/tests/packages/BrokenEmitsUTF8/broken.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/vendor/pip-1.2.1/tests/packages/BrokenEmitsUTF8/setup.py b/vendor/pip-1.2.1/tests/packages/BrokenEmitsUTF8/setup.py
deleted file mode 100644
index 989cc2a0bb5e7ebf6907d4a0620360ea9a1c47da..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/packages/BrokenEmitsUTF8/setup.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from distutils.core import setup
-import sys
-
-class FakeError(Exception):
-    pass
-
-if sys.argv[1] == 'install':
-    if hasattr(sys.stdout, 'buffer'):
-        sys.stdout.buffer.write('\nThis package prints out UTF-8 stuff like:\n'.encode('utf-8'))
-        sys.stdout.buffer.write('* return type of ‘main’ is not ‘int’\n'.encode('utf-8'))
-        sys.stdout.buffer.write('* Björk Guðmundsdóttir [ˈpjœr̥k ˈkvʏðmʏntsˌtoʊhtɪr]'.encode('utf-8'))
-    else:
-        pass
-        sys.stdout.write('\nThis package prints out UTF-8 stuff like:\n')
-        sys.stdout.write('* return type of \xe2\x80\x98main\xe2\x80\x99 is not \xe2\x80\x98int\xe2\x80\x99\n')
-        sys.stdout.write('* Bj\xc3\xb6rk Gu\xc3\xb0mundsd\xc3\xb3ttir [\xcb\x88pj\xc5\x93r\xcc\xa5k \xcb\x88kv\xca\x8f\xc3\xb0m\xca\x8fnts\xcb\x8cto\xca\x8aht\xc9\xaar]\n')
-
-    raise FakeError('this package designed to fail on install')
-
-setup(name='broken',
-      version='0.2broken',
-      py_modules=['broken'],
-      )
diff --git a/vendor/pip-1.2.1/tests/packages/FSPkg/fspkg/__init__.py b/vendor/pip-1.2.1/tests/packages/FSPkg/fspkg/__init__.py
deleted file mode 100644
index 792d6005489ebee62cde02066f19c5521e620451..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/packages/FSPkg/fspkg/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-#
diff --git a/vendor/pip-1.2.1/tests/packages/FSPkg/setup.cfg b/vendor/pip-1.2.1/tests/packages/FSPkg/setup.cfg
deleted file mode 100644
index 01bb954499e4eebf51604784cd75a64aa82b7b5a..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/packages/FSPkg/setup.cfg
+++ /dev/null
@@ -1,3 +0,0 @@
-[egg_info]
-tag_build = dev
-tag_svn_revision = true
diff --git a/vendor/pip-1.2.1/tests/packages/FSPkg/setup.py b/vendor/pip-1.2.1/tests/packages/FSPkg/setup.py
deleted file mode 100644
index c94ead942fb52c1e5df6fa8a455fb69184dedc39..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/packages/FSPkg/setup.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from setuptools import setup, find_packages
-
-version = '0.1'
-
-setup(name='FSPkg',
-      version=version,
-      description="File system test package",
-      long_description="""\
-File system test package""",
-      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
-      keywords='pip tests',
-      author='pip',
-      author_email='pip@openplans.org',
-      url='http://pip.openplans.org',
-      license='',
-      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
-      include_package_data=True,
-      zip_safe=False,
-      install_requires=[
-          # -*- Extra requirements: -*-
-      ],
-      entry_points="""
-      # -*- Entry points: -*-
-      """,
-      )
diff --git a/vendor/pip-1.2.1/tests/packages/LineEndings/setup.py b/vendor/pip-1.2.1/tests/packages/LineEndings/setup.py
deleted file mode 100644
index d65ccfb760606a6bcc3270588f3ca386254df7fe..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/packages/LineEndings/setup.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from distutils.core import setup
-
-setup()
diff --git a/vendor/pip-1.2.1/tests/packages/README.txt b/vendor/pip-1.2.1/tests/packages/README.txt
deleted file mode 100644
index b6ecde635bfa360efb30b2cd2a832d0a8d427aa0..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/packages/README.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-This package exists for testing uninstall-rollback. 
-
-Version 0.2broken has a setup.py crafted to fail on install (and only on
-install). If any earlier step were to fail (e.g. egg-info generation), the
-already-installed version would never be uninstalled, so uninstall-rollback
-would not come into play.
diff --git a/vendor/pip-1.2.1/tests/packages/broken-0.1.tar.gz b/vendor/pip-1.2.1/tests/packages/broken-0.1.tar.gz
deleted file mode 100644
index 3298fce4e02f29bdfa28fed577be86c49e859bec..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1/tests/packages/broken-0.1.tar.gz and /dev/null differ
diff --git a/vendor/pip-1.2.1/tests/packages/broken-0.2broken.tar.gz b/vendor/pip-1.2.1/tests/packages/broken-0.2broken.tar.gz
deleted file mode 100644
index 8c4a8aa74d704cdbdf5c520f211038416431827a..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1/tests/packages/broken-0.2broken.tar.gz and /dev/null differ
diff --git a/vendor/pip-1.2.1/tests/packages/paxpkg.tar.bz2 b/vendor/pip-1.2.1/tests/packages/paxpkg.tar.bz2
deleted file mode 100644
index d4fe6f4a96ed983d459dbc61d414454689e03ebc..0000000000000000000000000000000000000000
Binary files a/vendor/pip-1.2.1/tests/packages/paxpkg.tar.bz2 and /dev/null differ
diff --git a/vendor/pip-1.2.1/tests/packages/pkgwithmpkg-1.0-py2.7-macosx10.7.mpkg.zip b/vendor/pip-1.2.1/tests/packages/pkgwithmpkg-1.0-py2.7-macosx10.7.mpkg.zip
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/vendor/pip-1.2.1/tests/packages/pkgwithmpkg-1.0.tar.gz b/vendor/pip-1.2.1/tests/packages/pkgwithmpkg-1.0.tar.gz
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/vendor/pip-1.2.1/tests/path.py b/vendor/pip-1.2.1/tests/path.py
deleted file mode 100644
index fc724487c4f82a99e547bacd5c90193467824fe9..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/path.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# -*- coding: utf-8 -*-
-# Author: Aziz Köksal
-import os
-import sys
-import shutil
-
-if sys.version_info >= (3,):
-    unicode = str
-    u = str
-else:
-    unicode = unicode
-    u = lambda s: s.decode('utf-8')
-
-_base = os.path.supports_unicode_filenames and unicode or str
-
-from pip.util import rmtree
-
-
-class Path(_base):
-    """ Models a path in an object oriented way. """
-
-    sep = os.sep # File system path separator: '/' or '\'.
-    pathsep = os.pathsep # Separator in the PATH environment variable.
-    string = _base
-
-    def __new__(cls, *paths):
-        if len(paths):
-            return _base.__new__(cls, os.path.join(*paths))
-        return _base.__new__(cls)
-
-    def __div__(self, path):
-        """ Joins this path with another path. """
-        """ path_obj / 'bc.d' """
-        """ path_obj / path_obj2 """
-        return Path(self, path)
-
-    __truediv__ = __div__
-
-    def __rdiv__(self, path):
-        """ Joins this path with another path. """
-        """ "/home/a" / path_obj """
-        return Path(path, self)
-
-    __rtruediv__ = __rdiv__
-
-    def __idiv__(self, path):
-        """ Like __div__ but also assigns to the variable. """
-        """ path_obj /= 'bc.d' """
-        return Path(self, path)
-
-    __itruediv__ = __idiv__
-
-    def __floordiv__(self, paths):
-        """ Returns a list of paths prefixed with 'self'. """
-        """ '/home/a' // [bc.d, ef.g] -> [/home/a/bc.d, /home/a/ef.g] """
-        return [Path(self, path) for path in paths]
-
-    def __sub__(self, path):
-        """ Makes this path relative to another path. """
-        """ path_obj - '/home/a' """
-        """ path_obj - path_obj2 """
-        return Path(os.path.relpath(self, path))
-
-    def __rsub__(self, path):
-        """ Returns path relative to this path. """
-        """ "/home/a" - path_obj """
-        return Path(os.path.relpath(path, self))
-
-    def __add__(self, path):
-        """ Path('/home/a') + 'bc.d' -> '/home/abc.d' """
-        return Path(_base(self) + path)
-
-    def __radd__(self, path):
-        """ '/home/a' + Path('bc.d') -> '/home/abc.d' """
-        return Path(path + _base(self))
-
-    def __repr__(self):
-        return u("Path(%s)" % _base.__repr__(self))
-
-    def __hash__(self):
-        return _base.__hash__(self)
-
-    @property
-    def name(self):
-        """ '/home/a/bc.d' -> 'bc.d' """
-        return os.path.basename(self)
-
-    @property
-    def namebase(self):
-        """ '/home/a/bc.d' -> 'bc' """
-        return self.noext.name
-
-    @property
-    def noext(self):
-        """ '/home/a/bc.d' -> '/home/a/bc' """
-        return Path(os.path.splitext(self)[0])
-
-    @property
-    def ext(self):
-        """ '/home/a/bc.d' -> '.d' """
-        return Path(os.path.splitext(self)[1])
-
-    @property
-    def abspath(self):
-        """ './a/bc.d' -> '/home/a/bc.d'  """
-        return Path(os.path.abspath(self))
-
-    @property
-    def realpath(self):
-        """ Resolves symbolic links. """
-        return Path(os.path.realpath(self))
-
-    @property
-    def normpath(self):
-        """ '/home/x/.././a//bc.d' -> '/home/a/bc.d' """
-        return Path(os.path.normpath(self))
-
-    @property
-    def normcase(self):
-        """ Deals with case-insensitive filesystems """
-        return Path(os.path.normcase(self))
-
-    @property
-    def folder(self):
-        """ Returns the folder of this path. """
-        """ '/home/a/bc.d' -> '/home/a' """
-        """ '/home/a/' -> '/home/a' """
-        """ '/home/a' -> '/home' """
-        return Path(os.path.dirname(self))
-
-    @property
-    def exists(self):
-        """ Returns True if the path exists. """
-        return os.path.exists(self)
-
-    @property
-    def atime(self):
-        """ Returns last accessed time. """
-        return os.path.getatime(self)
-
-    @property
-    def mtime(self):
-        """ Returns last modified time. """
-        return os.path.getmtime(self)
-
-    @property
-    def ctime(self):
-        """ Returns last changed time. """
-        return os.path.getctime(self)
-
-    @classmethod
-    def supports_unicode(cls):
-        """ Returns True if the system can handle Unicode file names. """
-        return os.path.supports_unicode_filenames
-
-    def walk(self, **kwargs):
-        """ Returns a generator that walks through a directory tree. """
-        if "followlinks" in kwargs:
-            from sys import version_info as vi
-            if vi[0]*10+vi[1] < 26: # Only Python 2.6 or newer supports followlinks
-                del kwargs["followlinks"]
-        return os.walk(self, **kwargs)
-
-    def mkdir(self, mode=0x1FF): # 0o777
-        """ Creates a directory, if it doesn't exist already. """
-        if not self.exists:
-            os.mkdir(self, mode)
-
-    def makedirs(self, mode=0x1FF): # 0o777
-        """ Like mkdir(), but also creates parent directories. """
-        if not self.exists:
-            os.makedirs(self, mode)
-
-    def remove(self):
-        """ Removes a file. """
-        os.remove(self)
-    rm = remove # Alias.
-
-    def rmdir(self):
-        """ Removes a directory. """
-        return os.rmdir(self)
-
-    def rmtree(self, noerrors=True):
-        """ Removes a directory tree. Ignores errors by default. """
-        return rmtree(self, ignore_errors=noerrors)
-
-    def copy(self, to):
-        shutil.copy(self, to)
-
-    def copytree(self, to):
-        """ Copies a directory tree to another path. """
-        shutil.copytree(self, to)
-
-    def move(self, to):
-        """ Moves a file or directory to another path. """
-        shutil.move(self, to)
-
-    def rename(self, to):
-        """ Renames a file or directory. May throw an OSError. """
-        os.rename(self, to)
-
-    def renames(self, to):
-        os.renames(self, to)
-
-    def glob(self, pattern):
-        from glob import glob
-        return list(map(Path, glob(_base(self/pattern))))
-
-curdir = Path(os.path.curdir)
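As its docstrings describe, the Path helper overloads a handful of operators:
division joins paths, subtraction makes one path relative to another, and
floor division prefixes a whole list of names. A small illustrative sketch,
assuming a POSIX-style layout and hypothetical file names:

    from tests.path import Path

    home = Path('/home', 'a')                  # '/home/a'
    cfg = home / 'bc.d'                        # join -> '/home/a/bc.d'
    assert cfg.name == 'bc.d' and cfg.ext == '.d'
    assert (cfg - '/home/a') == 'bc.d'         # relative to another path
    assert home // ['x.txt', 'y.txt'] == ['/home/a/x.txt', '/home/a/y.txt']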
diff --git a/vendor/pip-1.2.1/tests/pypi_server.py b/vendor/pip-1.2.1/tests/pypi_server.py
deleted file mode 100644
index e05f944f4ff9a6661586dc77150b01a623edfde1..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/pypi_server.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import os
-import pip.backwardcompat
-from pip.backwardcompat import urllib, string_types, b, u, emailmessage
-
-
-urlopen_original = pip.backwardcompat.urllib2.urlopen
-
-
-class CachedResponse(object):
-    """
-    CachedResponse always caches URL accesses and returns the cached response.
-    It returns an object compatible with ``urllib.addinfourl``, i.e. an object
-    like the result of a call such as::
-
-        >>> response = urllib2.urlopen('http://example.com')
-    """
-
-    def __init__(self, url, folder):
-        self.headers = emailmessage.Message()
-        self.code = 500
-        self.msg = 'Internal Server Error'
-        # url can be a simple string, or a urllib2.Request object
-        if isinstance(url, string_types):
-            self.url = url
-        else:
-            self.url = url.get_full_url()
-            for key, value in url.headers.items():
-                self.headers[key] = value
-        self._body = b('')
-        self._set_all_fields(folder)
-
-    def _set_all_fields(self, folder):
-        filename = os.path.join(folder, urllib.quote(self.url, ''))
-        if not os.path.exists(filename):
-            self._cache_url(filename)
-        fp = open(filename, 'rb')
-        try:
-            line = fp.readline().strip()
-            self.code, self.msg = line.split(None, 1)
-        except ValueError:
-            raise ValueError('Bad field line: %r' % line)
-        self.code = int(self.code)
-        self.msg = u(self.msg)
-        for line in fp:
-            if line == b('\n'):
-                break
-            key, value = line.split(b(': '), 1)
-            self.headers[u(key)] = u(value.strip())
-        for line in fp:
-            self._body += line
-        fp.close()
-
-    def getcode(self):
-        return self.code
-
-    def geturl(self):
-        return self.url
-
-    def info(self):
-        return self.headers
-
-    def read(self, bytes=None):
-        """
-        Read a chunk of the given size, or everything if ``bytes`` is None.
-        """
-        if bytes:
-            result = self._body[:bytes]
-            self._body = self._body[bytes:]
-            return result
-        return self._body
-
-    def close(self):
-        pass
-
-    def _cache_url(self, filepath):
-        response = urlopen_original(self.url)
-        fp = open(filepath, 'wb')
-        # when it uses file:// scheme, code is None and there is no msg attr
-        # but it has been successfully opened
-        status = b('%s %s' % (getattr(response, 'code', 200) or 200, getattr(response, 'msg', 'OK')))
-        headers = [b('%s: %s' % (key, value)) for key, value in list(response.headers.items())]
-        body = response.read()
-        fp.write(b('\n').join([status] + headers + [b(''), body]))
-        fp.close()
-
-
-class PyPIProxy(object):
-
-    CACHE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "tests_cache")
-
-    @classmethod
-    def setup(cls):
-        instance = cls()
-        instance._create_cache_folder()
-        instance._monkey_patch_urllib2_to_cache_everything()
-
-    def _monkey_patch_urllib2_to_cache_everything(self):
-        def urlopen(url):
-            return CachedResponse(url, self.CACHE_PATH)
-        pip.backwardcompat.urllib2.urlopen = urlopen
-
-    def _create_cache_folder(self):
-        if not os.path.exists(self.CACHE_PATH):
-            os.mkdir(self.CACHE_PATH)
-
-
-def assert_equal(a, b):
-    assert a == b, "\nexpected:\n%r\ngot:\n%r" % (b, a)
-
-
-def test_cache_proxy():
-    url = 'http://example.com'
-    here = os.path.dirname(os.path.abspath(__file__))
-    filepath = os.path.join(here, urllib.quote(url, ''))
-    if os.path.exists(filepath):
-        os.remove(filepath)
-    response = pip.backwardcompat.urllib2.urlopen(url)
-    r = CachedResponse(url, here)
-    try:
-        assert_equal(r.code, response.code)
-        assert_equal(r.msg, response.msg)
-        assert_equal(r.read(), response.read())
-        assert_equal(r.url, response.url)
-        assert_equal(r.geturl(), response.geturl())
-        assert_equal(set(r.headers.keys()), set(response.headers.keys()))
-        assert_equal(set(r.info().keys()), set(response.info().keys()))
-        assert_equal(r.headers['content-length'], response.headers['content-length'])
-    finally:
-        os.remove(filepath)
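As the docstring explains, CachedResponse fetches a URL once, stores the
status line, headers and body in a file, and replays them on later requests;
PyPIProxy.setup() monkey-patches urllib2.urlopen so the whole test suite goes
through that cache. A minimal sketch of the intended use (the URL is only an
example):

    import pip.backwardcompat
    from tests.pypi_server import PyPIProxy

    PyPIProxy.setup()   # creates tests_cache/ and patches urllib2.urlopen

    # The first call hits the network and writes the response into
    # tests_cache/; later calls for the same URL are served from that file.
    response = pip.backwardcompat.urllib2.urlopen('http://pypi.python.org/simple/')
    body = response.read()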
diff --git a/vendor/pip-1.2.1/tests/test_all_pip.py b/vendor/pip-1.2.1/tests/test_all_pip.py
deleted file mode 100644
index f2f1ea16d48d9c22d698af5790a3e6df22562346..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_all_pip.py
+++ /dev/null
@@ -1,118 +0,0 @@
-import os
-import re
-import sys
-import subprocess
-import shutil
-from os.path import dirname, abspath
-
-from pip.backwardcompat import urllib
-from pip.util import rmtree
-
-
-src_folder = dirname(dirname(abspath(__file__)))
-
-
-def all_projects():
-    data = urllib.urlopen('http://pypi.python.org/simple/').read()
-    projects = [m.group(1) for m in re.finditer(r'<a.*?>(.+)</a>', data)]
-    return projects
-
-
-def main(args=None):
-    if args is None:
-        args = sys.argv[1:]
-    if not args:
-        print('Usage: test_all_pip.py <output-dir>')
-        sys.exit(1)
-    output = os.path.abspath(args[0])
-    if not os.path.exists(output):
-        print('Creating %s' % output)
-        os.makedirs(output)
-    pending_fn = os.path.join(output, 'pending.txt')
-    if not os.path.exists(pending_fn):
-        print('Downloading pending list')
-        projects = all_projects()
-        print('Found %s projects' % len(projects))
-        f = open(pending_fn, 'w')
-        for name in projects:
-            f.write(name + '\n')
-        f.close()
-    print('Starting testing...')
-    while os.stat(pending_fn).st_size:
-        _test_packages(output, pending_fn)
-    print('Finished all pending!')
-
-
-def _test_packages(output, pending_fn):
-    package = get_last_item(pending_fn)
-    print('Testing package %s' % package)
-    dest_dir = os.path.join(output, package)
-    print('Creating virtualenv in %s' % dest_dir)
-    create_venv(dest_dir)
-    print('Uninstalling actual pip')
-    code = subprocess.check_call([os.path.join(dest_dir, 'bin', 'pip'),
-                            'uninstall', '-y', 'pip'])
-    assert not code, 'pip uninstallation failed'
-    print('Installing development pip')
-    code = subprocess.check_call([os.path.join(dest_dir, 'bin', 'python'),
-                            'setup.py', 'install'],
-                            cwd=src_folder)
-    assert not code, 'pip installation failed'
-    print('Trying installation of %s' % dest_dir)
-    code = subprocess.check_call([os.path.join(dest_dir, 'bin', 'pip'),
-                            'install', package])
-    if code:
-        print('Installation of %s failed' % package)
-        print('Now checking easy_install...')
-        create_venv(dest_dir)
-        code = subprocess.check_call([os.path.join(dest_dir, 'bin', 'easy_install'),
-                                package])
-        if code:
-            print('easy_install also failed')
-            add_package(os.path.join(output, 'easy-failure.txt'), package)
-        else:
-            print('easy_install succeeded')
-            add_package(os.path.join(output, 'failure.txt'), package)
-        pop_last_item(pending_fn, package)
-    else:
-        print('Installation of %s succeeded' % package)
-        add_package(os.path.join(output, 'success.txt'), package)
-        pop_last_item(pending_fn, package)
-        rmtree(dest_dir)
-
-
-def create_venv(dest_dir):
-    if os.path.exists(dest_dir):
-        rmtree(dest_dir)
-    print('Creating virtualenv in %s' % dest_dir)
-    code = subprocess.check_call(['virtualenv', '--no-site-packages', dest_dir])
-    assert not code, "virtualenv failed"
-
-
-def get_last_item(fn):
-    f = open(fn, 'r')
-    lines = f.readlines()
-    f.close()
-    return lines[-1].strip()
-
-
-def pop_last_item(fn, line=None):
-    f = open(fn, 'r')
-    lines = f.readlines()
-    f.close()
-    if line:
-        assert lines[-1].strip() == line.strip()
-    lines.pop()
-    f = open(fn, 'w')
-    f.writelines(lines)
-    f.close()
-
-
-def add_package(filename, package):
-    f = open(filename, 'a')
-    f.write(package + '\n')
-    f.close()
-
-
-if __name__ == '__main__':
-    main()
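The script above crawls every project on PyPI's simple index, keeping its
state in pending.txt, success.txt and failure.txt under the chosen output
directory so a run can be interrupted and resumed. A sketch of kicking it off
from Python (the output directory is arbitrary, and the full run takes a very
long time):

    from tests.test_all_pip import main

    # Downloads the pending list on first run, then tests each package in turn.
    main(['/tmp/all-pip-results'])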
diff --git a/vendor/pip-1.2.1/tests/test_basic.py b/vendor/pip-1.2.1/tests/test_basic.py
deleted file mode 100644
index 8bdce9772d6a1d1118c29b7706a6ecac1e36b6e8..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_basic.py
+++ /dev/null
@@ -1,603 +0,0 @@
-import re
-import os
-import filecmp
-import textwrap
-import sys
-from os.path import abspath, join, curdir, pardir
-
-from nose import SkipTest
-from nose.tools import assert_raises
-from mock import patch
-
-from pip.util import rmtree, find_command
-from pip.exceptions import BadCommand
-
-from tests.test_pip import (here, reset_env, run_pip, pyversion, mkdir,
-                            src_folder, write_file)
-from tests.local_repos import local_checkout
-from tests.path import Path
-
-
-def test_correct_pip_version():
-    """
-    Check we are running proper version of pip in run_pip.
-    """
-    reset_env()
-
-    # output is like:
-    # pip PIPVERSION from PIPDIRECTORY (python PYVERSION)
-    result = run_pip('--version')
-
-    # compare the directory tree of the invoked pip with that of this source distribution
-    dir = re.match(r'pip \d(\.[\d])+(\.(pre|post)\d+)? from (.*) \(python \d(.[\d])+\)$',
-                   result.stdout).group(4)
-    pip_folder = join(src_folder, 'pip')
-    pip_folder_outputed = join(dir, 'pip')
-
-    diffs = filecmp.dircmp(pip_folder, pip_folder_outputed)
-
-    # If any non-matching .py files exist, we have a problem: run_pip
-    # is picking up some other version!  N.B. if this project acquires
-    # primary resources other than .py files, this code will need
-    # maintenance
-    mismatch_py = [x for x in diffs.left_only + diffs.right_only + diffs.diff_files if x.endswith('.py')]
-    assert not mismatch_py, 'mismatched source files in %r and %r'% (pip_folder, pip_folder_outputed)
-
-
-def test_pip_second_command_line_interface_works():
-    """
-    Check that ``pip-<PYVERSION>`` commands behave equally
-    """
-    e = reset_env()
-    result = e.run('pip-%s' % pyversion, 'install', 'INITools==0.2')
-    egg_info_folder = e.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
-    initools_folder = e.site_packages / 'initools'
-    assert egg_info_folder in result.files_created, str(result)
-    assert initools_folder in result.files_created, str(result)
-
-
-#def test_distutils_configuration_setting():
-#    """
-#    Test the distutils-configuration-setting command (which is distinct from other commands).
-#    """
-    #print run_pip('-vv', '--distutils-cfg=easy_install:index_url:http://download.zope.org/ppix/', expect_error=True)
-    #Script result: python ../../poacheggs.py -E .../poacheggs-tests/test-scratch -vv --distutils-cfg=easy_install:index_url:http://download.zope.org/ppix/
-    #-- stdout: --------------------
-    #Distutils config .../poacheggs-tests/test-scratch/lib/python.../distutils/distutils.cfg is writable
-    #Replaced setting index_url
-    #Updated .../poacheggs-tests/test-scratch/lib/python.../distutils/distutils.cfg
-    #<BLANKLINE>
-    #-- updated: -------------------
-    #  lib/python2.4/distutils/distutils.cfg  (346 bytes)
-
-
-def test_install_from_pypi():
-    """
-    Test installing a package from PyPI.
-    """
-    e = reset_env()
-    result = run_pip('install', '-vvv', 'INITools==0.2')
-    egg_info_folder = e.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
-    initools_folder = e.site_packages / 'initools'
-    assert egg_info_folder in result.files_created, str(result)
-    assert initools_folder in result.files_created, str(result)
-
-
-def test_install_from_mirrors():
-    """
-    Test installing a package from the PyPI mirrors.
-    """
-    e = reset_env()
-    result = run_pip('install', '-vvv', '--use-mirrors', '--no-index', 'INITools==0.2')
-    egg_info_folder = e.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
-    initools_folder = e.site_packages / 'initools'
-    assert egg_info_folder in result.files_created, str(result)
-    assert initools_folder in result.files_created, str(result)
-
-
-def test_install_from_mirrors_with_specific_mirrors():
-    """
-    Test installing a package from a specific PyPI mirror.
-    """
-    e = reset_env()
-    result = run_pip('install', '-vvv', '--use-mirrors', '--mirrors', "http://d.pypi.python.org/", '--no-index', 'INITools==0.2')
-    egg_info_folder = e.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
-    initools_folder = e.site_packages / 'initools'
-    assert egg_info_folder in result.files_created, str(result)
-    assert initools_folder in result.files_created, str(result)
-
-
-def test_editable_install():
-    """
-    Test editable installation.
-    """
-    reset_env()
-    result = run_pip('install', '-e', 'INITools==0.2', expect_error=True)
-    assert "--editable=INITools==0.2 should be formatted with svn+URL" in result.stdout
-    assert len(result.files_created) == 1, result.files_created
-    assert not result.files_updated, result.files_updated
-
-
-def test_install_editable_from_svn():
-    """
-    Test checking out from svn.
-    """
-    reset_env()
-    result = run_pip('install',
-                     '-e',
-                     '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'))
-    result.assert_installed('INITools', with_files=['.svn'])
-
-
-def test_download_editable_to_custom_path():
-    """
-    Test downloading an editable using a relative custom src folder.
-    """
-    reset_env()
-    mkdir('customdl')
-    result = run_pip('install',
-                     '-e',
-                     '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'),
-                     '--src',
-                     'customsrc',
-                     '--download',
-                     'customdl')
-    customsrc = Path('scratch')/'customsrc'/'initools'
-    assert customsrc in result.files_created, sorted(result.files_created.keys())
-    assert customsrc/'setup.py' in result.files_created, sorted(result.files_created.keys())
-
-    customdl = Path('scratch')/'customdl'/'initools'
-    customdl_files_created = [filename for filename in result.files_created
-                                           if filename.startswith(customdl)]
-    assert customdl_files_created
-
-
-def test_editable_no_install_followed_by_no_download():
-    """
-    Test installing an editable in two steps (first with --no-install, then with --no-download).
-    """
-    reset_env()
-
-    result = run_pip('install',
-                     '-e',
-                     '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'),
-                     '--no-install', expect_error=True)
-    result.assert_installed('INITools', without_egg_link=True, with_files=['.svn'])
-
-    result = run_pip('install',
-                     '-e',
-                     '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'),
-                     '--no-download', expect_error=True)
-    result.assert_installed('INITools', without_files=[curdir, '.svn'])
-
-
-def test_no_install_followed_by_no_download():
-    """
-    Test installing in two steps (first with --no-install, then with --no-download).
-    """
-    env = reset_env()
-
-    egg_info_folder = env.site_packages/'INITools-0.2-py%s.egg-info' % pyversion
-    initools_folder = env.site_packages/'initools'
-    build_dir = env.venv/'build'/'INITools'
-
-    result1 = run_pip('install', 'INITools==0.2', '--no-install', expect_error=True)
-    assert egg_info_folder not in result1.files_created, str(result1)
-    assert initools_folder not in result1.files_created, sorted(result1.files_created)
-    assert build_dir in result1.files_created, result1.files_created
-    assert build_dir/'INITools.egg-info' in result1.files_created
-
-    result2 = run_pip('install', 'INITools==0.2', '--no-download', expect_error=True)
-    assert egg_info_folder in result2.files_created, str(result2)
-    assert initools_folder in result2.files_created, sorted(result2.files_created)
-    assert build_dir not in result2.files_created
-    assert build_dir/'INITools.egg-info' not in result2.files_created
-
-
-def test_bad_install_with_no_download():
-    """
-    Test that --no-download behaves sensibly if the package source can't be found.
-    """
-    reset_env()
-    result = run_pip('install', 'INITools==0.2', '--no-download', expect_error=True)
-    assert "perhaps --no-download was used without first running "\
-            "an equivalent install with --no-install?" in result.stdout
-
-
-def test_install_dev_version_from_pypi():
-    """
-    Test using package==dev.
-    """
-    e = reset_env()
-    result = run_pip('install', 'INITools==dev', expect_error=True)
-    assert (e.site_packages / 'initools') in result.files_created, str(result.stdout)
-
-
-def test_install_editable_from_git():
-    """
-    Test cloning from Git.
-    """
-    reset_env()
-    args = ['install']
-    args.extend(['-e',
-                 '%s#egg=pip-test-package' %
-                 local_checkout('git+http://github.com/pypa/pip-test-package.git')])
-    result = run_pip(*args, **{"expect_error": True})
-    result.assert_installed('pip-test-package', with_files=['.git'])
-
-
-def test_install_editable_from_hg():
-    """
-    Test cloning from Mercurial.
-    """
-    reset_env()
-    result = run_pip('install', '-e',
-                     '%s#egg=django-registration' %
-                     local_checkout('hg+http://bitbucket.org/ubernostrum/django-registration'),
-                     expect_error=True)
-    result.assert_installed('django-registration', with_files=['.hg'])
-
-
-def test_vcs_url_final_slash_normalization():
-    """
-    Test that presence or absence of final slash in VCS URL is normalized.
-    """
-    reset_env()
-    result = run_pip('install', '-e',
-                     '%s/#egg=django-registration' %
-                     local_checkout('hg+http://bitbucket.org/ubernostrum/django-registration'),
-                     expect_error=True)
-    assert 'pip-log.txt' not in result.files_created, result.files_created['pip-log.txt'].bytes
-
-
-def test_install_editable_from_bazaar():
-    """
-    Test checking out from Bazaar.
-    """
-    reset_env()
-    result = run_pip('install', '-e',
-                     '%s/@174#egg=django-wikiapp' %
-                     local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'),
-                     expect_error=True)
-    result.assert_installed('django-wikiapp', with_files=['.bzr'])
-
-
-def test_vcs_url_urlquote_normalization():
-    """
-    Test that urlquoted characters are normalized for repo URL comparison.
-    """
-    reset_env()
-    result = run_pip('install', '-e',
-                     '%s/#egg=django-wikiapp' %
-                     local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'),
-                     expect_error=True)
-    assert 'pip-log.txt' not in result.files_created, result.files_created['pip-log.txt'].bytes
-
-
-def test_install_from_local_directory():
-    """
-    Test installing from a local directory.
-    """
-    env = reset_env()
-    to_install = abspath(join(here, 'packages', 'FSPkg'))
-    result = run_pip('install', to_install, expect_error=False)
-    fspkg_folder = env.site_packages/'fspkg'
-    egg_info_folder = env.site_packages/'FSPkg-0.1dev-py%s.egg-info' % pyversion
-    assert fspkg_folder in result.files_created, str(result.stdout)
-    assert egg_info_folder in result.files_created, str(result)
-
-
-def test_install_from_local_directory_with_no_setup_py():
-    """
-    Test installing from a local directory with no 'setup.py'.
-    """
-    reset_env()
-    result = run_pip('install', here, expect_error=True)
-    assert len(result.files_created) == 1, result.files_created
-    assert 'pip-log.txt' in result.files_created, result.files_created
-    assert "is not installable. File 'setup.py' not found." in result.stdout
-
-
-def test_install_curdir():
-    """
-    Test installing current directory ('.').
-    """
-    env = reset_env()
-    run_from = abspath(join(here, 'packages', 'FSPkg'))
-    # Python 2.4 Windows balks if this exists already
-    egg_info = join(run_from, "FSPkg.egg-info")
-    if os.path.isdir(egg_info):
-        rmtree(egg_info)
-    result = run_pip('install', curdir, cwd=run_from, expect_error=False)
-    fspkg_folder = env.site_packages/'fspkg'
-    egg_info_folder = env.site_packages/'FSPkg-0.1dev-py%s.egg-info' % pyversion
-    assert fspkg_folder in result.files_created, str(result.stdout)
-    assert egg_info_folder in result.files_created, str(result)
-
-
-def test_install_curdir_usersite_fails_in_old_python():
-    """
-    Test that the --user option fails intelligibly on older Python versions (pre 2.6)
-    """
-    if sys.version_info >= (2, 6):
-        raise SkipTest()
-    reset_env()
-    run_from = abspath(join(here, 'packages', 'FSPkg'))
-    result = run_pip('install', '--user', curdir, cwd=run_from, expect_error=True)
-    assert '--user is only supported in Python version 2.6 and newer' in result.stdout
-
-
-def test_install_curdir_usersite():
-    """
-    Test installing current directory ('.') into usersite
-    """
-    if sys.version_info < (2, 6):
-        raise SkipTest()
-    # FIXME distutils --user option seems to be broken in pypy
-    if hasattr(sys, "pypy_version_info"):
-        raise SkipTest()
-    env = reset_env(use_distribute=True)
-    run_from = abspath(join(here, 'packages', 'FSPkg'))
-    result = run_pip('install', '--user', curdir, cwd=run_from, expect_error=False)
-    fspkg_folder = env.user_site/'fspkg'
-    egg_info_folder = env.user_site/'FSPkg-0.1dev-py%s.egg-info' % pyversion
-    assert fspkg_folder in result.files_created, str(result.stdout)
-
-    assert egg_info_folder in result.files_created, str(result)
-
-
-def test_install_subversion_usersite_editable_with_distribute():
-    """
-    Test installing an editable from Subversion into usersite with distribute installed
-    """
-    if sys.version_info < (2, 6):
-        raise SkipTest()
-    # FIXME distutils --user option seems to be broken in pypy
-    if hasattr(sys, "pypy_version_info"):
-        raise SkipTest()
-    env = reset_env(use_distribute=True)
-    (env.lib_path/'no-global-site-packages.txt').rm() # this one reenables user_site
-
-    result = run_pip('install', '--user', '-e',
-                     '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'))
-    result.assert_installed('INITools', use_user_site=True)
-
-
-def test_install_subversion_usersite_editable_with_setuptools_fails():
-    """
-    Test that installing an editable into usersite with plain setuptools fails
-    """
-    # --user only works on 2.6 or higher
-    if sys.version_info < (2, 6):
-        raise SkipTest()
-    # We don't try to use setuptools for 3.X.
-    elif sys.version_info >= (3,):
-        raise SkipTest()
-    env = reset_env(use_distribute=False)
-    no_site_packages = env.lib_path/'no-global-site-packages.txt'
-    if os.path.isfile(no_site_packages):
-        no_site_packages.rm() # this re-enables user_site
-
-    result = run_pip('install', '--user', '-e',
-                     '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'),
-                     expect_error=True)
-    assert '--user --editable not supported with setuptools, use distribute' in result.stdout
-
-
-def test_install_pardir():
-    """
-    Test installing parent directory ('..').
-    """
-    env = reset_env()
-    run_from = abspath(join(here, 'packages', 'FSPkg', 'fspkg'))
-    result = run_pip('install', pardir, cwd=run_from, expect_error=False)
-    fspkg_folder = env.site_packages/'fspkg'
-    egg_info_folder = env.site_packages/'FSPkg-0.1dev-py%s.egg-info' % pyversion
-    assert fspkg_folder in result.files_created, str(result.stdout)
-    assert egg_info_folder in result.files_created, str(result)
-
-
-def test_install_global_option():
-    """
-    Test using global distutils options.
-    (In particular those that disable the actual install action)
-    """
-    reset_env()
-    result = run_pip('install', '--global-option=--version', "INITools==0.1")
-    assert '0.1\n' in result.stdout
-
-
-def test_install_with_pax_header():
-    """
-    Test installing from a tarball with a pax header, for Python < 2.6
-    """
-    reset_env()
-    run_from = abspath(join(here, 'packages'))
-    run_pip('install', 'paxpkg.tar.bz2', cwd=run_from)
-
-
-def test_install_using_install_option_and_editable():
-    """
-    Test installing a tool using -e and --install-option
-    """
-    env = reset_env()
-    folder = 'script_folder'
-    mkdir(folder)
-    url = 'git+git://github.com/pypa/virtualenv'
-    result = run_pip('install', '-e', '%s#egg=virtualenv' %
-                      local_checkout(url),
-                     '--install-option=--script-dir=%s' % folder)
-    virtualenv_bin = env.venv/'src'/'virtualenv'/folder/'virtualenv'+env.exe
-    assert virtualenv_bin in result.files_created
-
-
-def test_install_global_option_using_editable():
-    """
-    Test using global distutils options, but in an editable installation
-    """
-    reset_env()
-    url = 'hg+http://bitbucket.org/runeh/anyjson'
-    result = run_pip('install', '--global-option=--version',
-                     '-e', '%s@0.2.5#egg=anyjson' %
-                      local_checkout(url))
-    assert '0.2.5\n' in result.stdout
-
-
-def test_install_package_with_same_name_in_curdir():
-    """
-    Test installing a package with the same name of a local folder
-    """
-    env = reset_env()
-    mkdir('mock==0.6')
-    result = run_pip('install', 'mock==0.6')
-    egg_folder = env.site_packages / 'mock-0.6.0-py%s.egg-info' % pyversion
-    assert egg_folder in result.files_created, str(result)
-
-
-mock100_setup_py = textwrap.dedent('''\
-                        from setuptools import setup
-                        setup(name='mock',
-                              version='100.1')''')
-
-
-def test_install_folder_using_dot_slash():
-    """
-    Test installing a folder using pip install ./foldername
-    """
-    env = reset_env()
-    mkdir('mock')
-    pkg_path = env.scratch_path/'mock'
-    write_file('setup.py', mock100_setup_py, pkg_path)
-    result = run_pip('install', './mock')
-    egg_folder = env.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
-    assert egg_folder in result.files_created, str(result)
-
-
-def test_install_folder_using_slash_in_the_end():
-    r"""
-    Test installing a folder using pip install foldername/ or foldername\
-    """
-    env = reset_env()
-    mkdir('mock')
-    pkg_path = env.scratch_path/'mock'
-    write_file('setup.py', mock100_setup_py, pkg_path)
-    result = run_pip('install', 'mock' + os.path.sep)
-    egg_folder = env.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
-    assert egg_folder in result.files_created, str(result)
-
-
-def test_install_folder_using_relative_path():
-    """
-    Test installing a folder using pip install folder1/folder2
-    """
-    env = reset_env()
-    mkdir('initools')
-    mkdir(Path('initools')/'mock')
-    pkg_path = env.scratch_path/'initools'/'mock'
-    write_file('setup.py', mock100_setup_py, pkg_path)
-    result = run_pip('install', Path('initools')/'mock')
-    egg_folder = env.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
-    assert egg_folder in result.files_created, str(result)
-
-
-def test_install_package_which_contains_dev_in_name():
-    """
-    Test installing package from pypi which contains 'dev' in name
-    """
-    env = reset_env()
-    result = run_pip('install', 'django-devserver==0.0.4')
-    devserver_folder = env.site_packages/'devserver'
-    egg_info_folder = env.site_packages/'django_devserver-0.0.4-py%s.egg-info' % pyversion
-    assert devserver_folder in result.files_created, str(result.stdout)
-    assert egg_info_folder in result.files_created, str(result)
-
-
-def test_install_package_with_target():
-    """
-    Test installing a package using pip install --target
-    """
-    env = reset_env()
-    target_dir = env.scratch_path/'target'
-    result = run_pip('install', '-t', target_dir, "initools==0.1")
-    assert Path('scratch')/'target'/'initools' in result.files_created, str(result)
-
-
-def test_find_command_folder_in_path():
-    """
-    If a folder named e.g. 'git' is in PATH, and find_command is looking for
-    the 'git' executable, it should not match the folder, but rather keep
-    looking.
-    """
-    env = reset_env()
-    mkdir('path_one')
-    path_one = env.scratch_path/'path_one'
-    mkdir(path_one/'foo')
-    mkdir('path_two')
-    path_two = env.scratch_path/'path_two'
-    write_file(path_two/'foo', '# nothing')
-    found_path = find_command('foo', map(str, [path_one, path_two]))
-    assert found_path == path_two/'foo'
-
-
-def test_does_not_find_command_because_there_is_no_path():
-    """
-    Test calling `pip.utils.find_command` when there is no PATH env variable
-    """
-    environ_before = os.environ
-    os.environ = {}
-    try:
-        try:
-            find_command('anycommand')
-        except BadCommand:
-            e = sys.exc_info()[1]
-            assert e.args == ("Cannot find command 'anycommand'",)
-        else:
-            raise AssertionError("`find_command` should raise `BadCommand`")
-    finally:
-        os.environ = environ_before
-
-
-@patch('os.pathsep', ':')
-@patch('pip.util.get_pathext')
-@patch('os.path.isfile')
-def test_find_command_trys_all_pathext(mock_isfile, getpath_mock):
-    """
-    If no pathext is supplied, find_command should check the default list of
-    extensions when the file does not exist.
-    """
-    mock_isfile.return_value = False
-
-    getpath_mock.return_value = os.pathsep.join([".COM", ".EXE"])
-
-    paths = [os.path.join('path_one', f)  for f in ['foo.com', 'foo.exe', 'foo']]
-    expected = [((p,),) for p in paths]
-
-    assert_raises(BadCommand, find_command, 'foo', 'path_one')
-    assert mock_isfile.call_args_list == expected, "Actual: %s\nExpected %s" % (mock_isfile.call_args_list, expected)
-    assert getpath_mock.called, "Should call get_pathext"
-
-
-@patch('os.pathsep', ':')
-@patch('pip.util.get_pathext')
-@patch('os.path.isfile')
-def test_find_command_trys_supplied_pathext(mock_isfile, getpath_mock):
-    """
-    If pathext is supplied, find_command should try all of its extensions to find the file.
-    """
-    mock_isfile.return_value = False
-    getpath_mock.return_value = ".FOO"
-
-    pathext = os.pathsep.join([".RUN", ".CMD"])
-
-    paths = [os.path.join('path_one', f)  for f in ['foo.run', 'foo.cmd', 'foo']]
-    expected = [((p,),) for p in paths]
-
-    assert_raises(BadCommand, find_command, 'foo', 'path_one', pathext)
-    assert mock_isfile.call_args_list == expected, "Actual: %s\nExpected %s" % (mock_isfile.call_args_list, expected)
-    assert not getpath_mock.called, "Should not call get_pathext"
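The two tests above exercise pip.util.find_command, which searches the given
directories (or PATH) for an actual file, trying each extension from pathext
(or get_pathext()) before the bare name, and skipping directories that merely
contain a folder with that name. A small sketch of calling it directly,
assuming the default PATH lookup:

    from pip.util import find_command
    from pip.exceptions import BadCommand

    try:
        # With no paths/pathext arguments, PATH and the default extension list are used.
        svn_path = find_command('svn')
        print('svn found at %s' % svn_path)
    except BadCommand:
        # Raised when the command cannot be located, as the tests above assert.
        print('svn is not available')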
diff --git a/vendor/pip-1.2.1/tests/test_bundle.py b/vendor/pip-1.2.1/tests/test_bundle.py
deleted file mode 100644
index cf5c56376ff633bb450a46f197347c7c7f7187f1..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_bundle.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import zipfile
-import textwrap
-from os.path import join
-from pip.download import path_to_url2
-from tests.test_pip import here, reset_env, run_pip, write_file
-from tests.path import Path
-from tests.local_repos import local_checkout
-
-
-def test_create_bundle():
-    """
-    Test making a bundle.  We'll grab one package from the filesystem
-    (the FSPkg dummy package), one from vcs (initools) and one from an
-    index (pip itself).
-
-    """
-    env = reset_env()
-    fspkg = path_to_url2(Path(here)/'packages'/'FSPkg')
-    run_pip('install', '-e', fspkg)
-    pkg_lines = textwrap.dedent('''\
-            -e %s
-            -e %s#egg=initools-dev
-            pip''' % (fspkg, local_checkout('svn+http://svn.colorstudy.com/INITools/trunk')))
-    write_file('bundle-req.txt', pkg_lines)
-    # Create a bundle in env.scratch_path/ test.pybundle
-    result = run_pip('bundle', '-r', env.scratch_path/ 'bundle-req.txt', env.scratch_path/ 'test.pybundle')
-    bundle = result.files_after.get(join('scratch', 'test.pybundle'), None)
-    assert bundle is not None
-
-    files = zipfile.ZipFile(bundle.full).namelist()
-    assert 'src/FSPkg/' in files
-    assert 'src/initools/' in files
-    assert 'build/pip/' in files
diff --git a/vendor/pip-1.2.1/tests/test_cleanup.py b/vendor/pip-1.2.1/tests/test_cleanup.py
deleted file mode 100644
index 15a0508333bdfa2d93821cafd0e26e2ec1e212ff..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_cleanup.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import os
-import textwrap
-from os.path import abspath, exists, join
-from tests.test_pip import (here, reset_env, run_pip, write_file, mkdir)
-from tests.local_repos import local_checkout
-from tests.path import Path
-
-
-def test_cleanup_after_install_from_pypi():
-    """
-    Test clean up after installing a package from PyPI.
-
-    """
-    env = reset_env()
-    run_pip('install', 'INITools==0.2', expect_error=True)
-    build = env.scratch_path/"build"
-    src = env.scratch_path/"src"
-    assert not exists(build), "build/ dir still exists: %s" % build
-    assert not exists(src), "unexpected src/ dir exists: %s" % src
-
-
-def test_cleanup_after_install_editable_from_hg():
-    """
-    Test clean up after cloning from Mercurial.
-
-    """
-    env = reset_env()
-    run_pip('install',
-            '-e',
-            '%s#egg=django-registration' %
-            local_checkout('hg+http://bitbucket.org/ubernostrum/django-registration'),
-            expect_error=True)
-    build = env.venv_path/'build'
-    src = env.venv_path/'src'
-    assert not exists(build), "build/ dir still exists: %s" % build
-    assert exists(src), "expected src/ dir doesn't exist: %s" % src
-
-
-def test_cleanup_after_install_from_local_directory():
-    """
-    Test clean up after installing from a local directory.
-
-    """
-    env = reset_env()
-    to_install = abspath(join(here, 'packages', 'FSPkg'))
-    run_pip('install', to_install, expect_error=False)
-    build = env.venv_path/'build'
-    src = env.venv_path/'src'
-    assert not exists(build), "unexpected build/ dir exists: %s" % build
-    assert not exists(src), "unexpected src/ dir exist: %s" % src
-
-
-def test_cleanup_after_create_bundle():
-    """
-    Test clean up after making a bundle. Make sure (build|src)-bundle/ dirs are removed but not src/.
-
-    """
-    env = reset_env()
-    # Install an editable to create a src/ dir.
-    args = ['install']
-    args.extend(['-e',
-                 '%s#egg=pip-test-package' %
-                    local_checkout('git+http://github.com/pypa/pip-test-package.git')])
-    run_pip(*args)
-    build = env.venv_path/"build"
-    src = env.venv_path/"src"
-    assert not exists(build), "build/ dir still exists: %s" % build
-    assert exists(src), "expected src/ dir doesn't exist: %s" % src
-
-    # Make the bundle.
-    fspkg = 'file://%s/FSPkg' %join(here, 'packages')
-    pkg_lines = textwrap.dedent('''\
-            -e %s
-            -e %s#egg=initools-dev
-            pip''' % (fspkg, local_checkout('svn+http://svn.colorstudy.com/INITools/trunk')))
-    write_file('bundle-req.txt', pkg_lines)
-    run_pip('bundle', '-r', 'bundle-req.txt', 'test.pybundle')
-    build_bundle = env.scratch_path/"build-bundle"
-    src_bundle = env.scratch_path/"src-bundle"
-    assert not exists(build_bundle), "build-bundle/ dir still exists: %s" % build_bundle
-    assert not exists(src_bundle), "src-bundle/ dir still exists: %s" % src_bundle
-
-    # Make sure previously created src/ from editable still exists
-    assert exists(src), "expected src dir doesn't exist: %s" % src
-
-
-def test_no_install_and_download_should_not_leave_build_dir():
-    """
-    It should remove the build/ dir if it was pip that created it
-    """
-    env = reset_env()
-    mkdir('downloaded_packages')
-    assert not os.path.exists(env.venv_path/'/build')
-    result = run_pip('install', '--no-install', 'INITools==0.2', '-d', 'downloaded_packages')
-    assert Path('scratch')/'downloaded_packages/build' not in result.files_created, 'pip should not leave build/ dir'
-    assert not os.path.exists(env.venv_path/'/build'), "build/ dir should be deleted"
-
-
-def test_download_should_not_delete_existing_build_dir():
-    """
-    It should not delete build/ if it existed before running the command
-    """
-    env = reset_env()
-    mkdir(env.venv_path/'build')
-    f = open(env.venv_path/'build'/'somefile.txt', 'w')
-    f.write('I am not empty!')
-    f.close()
-    run_pip('install', '--no-install', 'INITools==0.2', '-d', '.')
-    f = open(env.venv_path/'build'/'somefile.txt')
-    content = f.read()
-    f.close()
-    assert os.path.exists(env.venv_path/'build'), "build/ should be left if it exists before pip run"
-    assert content == 'I am not empty!', "it should not affect build/ and its content"
-    assert ['somefile.txt'] == os.listdir(env.venv_path/'build')
diff --git a/vendor/pip-1.2.1/tests/test_compat.py b/vendor/pip-1.2.1/tests/test_compat.py
deleted file mode 100644
index 611d3f964d45311ca69f061cf4b060ea24baf010..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_compat.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""
-Tests for compatibility workarounds.
-
-"""
-import os
-from tests.test_pip import (here, reset_env, run_pip, pyversion,
-                            assert_all_changes)
-
-
-def test_debian_egg_name_workaround():
-    """
-    We can uninstall packages installed with the pyversion removed from the
-    egg-info metadata directory name.
-
-    Refs:
-    http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
-    https://bugs.launchpad.net/ubuntu/+source/distribute/+bug/725178
-    https://bitbucket.org/ianb/pip/issue/104/pip-uninstall-on-ubuntu-linux
-
-    """
-    env = reset_env()
-    result = run_pip('install', 'INITools==0.2', expect_error=True)
-
-    egg_info = os.path.join(
-        env.site_packages, "INITools-0.2-py%s.egg-info" % pyversion)
-
-    # Debian only removes the pyversion for global installs, not inside a venv,
-    # so even if this test runs on a Debian/Ubuntu system with a broken
-    # setuptools, the install happens inside a venv and keeps the normal .egg-info
-    assert egg_info in result.files_created, "Couldn't find %s" % egg_info
-
-    # The Debian no-pyversion version of the .egg-info
-    mangled = os.path.join(env.site_packages, "INITools-0.2.egg-info")
-    assert mangled not in result.files_created, "Found unexpected %s" % mangled
-
-    # Simulate a Debian install by copying the .egg-info to their name for it
-    full_egg_info = os.path.join(env.root_path, egg_info)
-    assert os.path.isdir(full_egg_info)
-    full_mangled = os.path.join(env.root_path, mangled)
-    os.renames(full_egg_info, full_mangled)
-    assert os.path.isdir(full_mangled)
-
-    # Try the uninstall and verify that everything is removed.
-    result2 = run_pip("uninstall", "INITools", "-y")
-    assert_all_changes(result, result2, [env.venv/'build', 'cache'])
-
-
-def test_setup_py_with_dos_line_endings():
-    """
-    It doesn't choke on a setup.py file that uses DOS line endings (\\r\\n).
-
-    Refs https://github.com/pypa/pip/issues/237
-    """
-    reset_env()
-    to_install = os.path.abspath(os.path.join(here, 'packages', 'LineEndings'))
-    run_pip('install', to_install, expect_error=False)
diff --git a/vendor/pip-1.2.1/tests/test_completion.py b/vendor/pip-1.2.1/tests/test_completion.py
deleted file mode 100644
index 9381cf11d1ba521305ce89c80975eaab17016c31..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_completion.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import os
-from tests.test_pip import reset_env, run_pip, get_env
-
-
-def test_completion_for_bash():
-    """
-    Test getting completion for bash shell
-    """
-    reset_env()
-    bash_completion = """\
-_pip_completion()
-{
-    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
-                   COMP_CWORD=$COMP_CWORD \\
-                   PIP_AUTO_COMPLETE=1 $1 ) )
-}
-complete -o default -F _pip_completion pip"""
-
-    result = run_pip('completion', '--bash')
-    assert bash_completion in result.stdout, 'bash completion is wrong'
-
-
-def test_completion_for_zsh():
-    """
-    Test getting completion for zsh shell
-    """
-    reset_env()
-    zsh_completion = """\
-function _pip_completion {
-  local words cword
-  read -Ac words
-  read -cn cword
-  reply=( $( COMP_WORDS="$words[*]" \\
-             COMP_CWORD=$(( cword-1 )) \\
-             PIP_AUTO_COMPLETE=1 $words[1] ) )
-}
-compctl -K _pip_completion pip"""
-
-    result = run_pip('completion', '--zsh')
-    assert zsh_completion in result.stdout, 'zsh completion is wrong'
-
-
-def test_completion_for_unknown_shell():
-    """
-    Test getting completion for an unknown shell
-    """
-    reset_env()
-    error_msg = 'error: no such option: --myfooshell'
-    result = run_pip('completion', '--myfooshell', expect_error=True)
-    assert error_msg in result.stderr, 'tests for an unknown shell failed'
-
-
-def test_completion_alone():
-    """
-    Test getting completion with no shell specified (just ``pip completion``)
-    """
-    reset_env()
-    result = run_pip('completion', expect_error=True)
-    assert 'ERROR: You must pass --bash or --zsh' in result.stderr,\
-            'completion alone failed -- ' + result.stderr
-
-
-def test_completion_for_un_snippet():
-    """
-    Test that completion for ``un`` returns
-    uninstall and unzip
-    """
-    environ = os.environ.copy()
-    reset_env(environ)
-    environ['PIP_AUTO_COMPLETE'] = '1'
-    environ['COMP_WORDS'] = 'pip un'
-    environ['COMP_CWORD'] = '1'
-    env = get_env()
-    # expect_error is True because autocomplete exits with a 1 status code
-    result = env.run('python', '-c', 'import pip;pip.autocomplete()',
-            expect_error=True)
-    assert result.stdout.strip().split() == ['unzip', 'uninstall'],\
-           "autocomplete function could not complete ``un`` snippet"
-
-
-def test_completion_for_default_parameters():
-    """
-    Test that completion for ``--`` contains --help
-    """
-    environ = os.environ.copy()
-    reset_env(environ)
-    environ['PIP_AUTO_COMPLETE'] = '1'
-    environ['COMP_WORDS'] = 'pip --'
-    environ['COMP_CWORD'] = '1'
-    env = get_env()
-    # expect_error is True because autocomplete exits with a 1 status code
-    result = env.run('python', '-c', 'import pip;pip.autocomplete()',
-            expect_error=True)
-    assert '--help' in result.stdout,\
-           "autocomplete function could not complete ``--``"
diff --git a/vendor/pip-1.2.1/tests/test_config.py b/vendor/pip-1.2.1/tests/test_config.py
deleted file mode 100644
index 477f868f2414960708721be2048140af0687d692..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_config.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import os
-import tempfile
-import textwrap
-from tests.test_pip import reset_env, run_pip, clear_environ, write_file
-
-
-def test_options_from_env_vars():
-    """
-    Test that ConfigOptionParser reads env vars (here, PIP_NO_INDEX keeps pip off PyPI)
-
-    """
-    environ = clear_environ(os.environ.copy())
-    environ['PIP_NO_INDEX'] = '1'
-    reset_env(environ)
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "Ignoring indexes:" in result.stdout, str(result)
-    assert "DistributionNotFound: No distributions at all found for INITools" in result.stdout
-
-
-def test_command_line_options_override_env_vars():
-    """
-    Test that command line options override environmental variables.
-
-    """
-    environ = clear_environ(os.environ.copy())
-    environ['PIP_INDEX_URL'] = 'http://b.pypi.python.org/simple/'
-    reset_env(environ)
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "Getting page http://b.pypi.python.org/simple/INITools" in result.stdout
-    reset_env(environ)
-    result = run_pip('install', '-vvv', '--index-url', 'http://download.zope.org/ppix', 'INITools', expect_error=True)
-    assert "b.pypi.python.org" not in result.stdout
-    assert "Getting page http://download.zope.org/ppix" in result.stdout
-
-
-def test_env_vars_override_config_file():
-    """
-    Test that environmental variables override settings in config files.
-
-    """
-    fd, config_file = tempfile.mkstemp('-pip.cfg', 'test-')
-    try:
-        _test_env_vars_override_config_file(config_file)
-    finally:
-        # `os.close` is a workaround for a bug in subprocess
-        # http://bugs.python.org/issue3210
-        os.close(fd)
-        os.remove(config_file)
-
-
-def _test_env_vars_override_config_file(config_file):
-    environ = clear_environ(os.environ.copy())
-    environ['PIP_CONFIG_FILE'] = config_file # set this to make pip load it
-    reset_env(environ)
-    # It's important that we test this particular config value ('no-index')
-    # because there is/was a bug which only shows up in cases in which
-    # 'config-item' and 'config_item' hash to the same value modulo the size
-    # of the config dictionary.
-    write_file(config_file, textwrap.dedent("""\
-        [global]
-        no-index = 1
-        """))
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "DistributionNotFound: No distributions at all found for INITools" in result.stdout
-    environ['PIP_NO_INDEX'] = '0'
-    reset_env(environ)
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "Successfully installed INITools" in result.stdout
-
-
-def test_command_line_append_flags():
-    """
-    Test command line flags that append to defaults set by environmental variables.
-
-    """
-    environ = clear_environ(os.environ.copy())
-    environ['PIP_FIND_LINKS'] = 'http://pypi.pinaxproject.com'
-    reset_env(environ)
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "Analyzing links from page http://pypi.pinaxproject.com" in result.stdout
-    reset_env(environ)
-    result = run_pip('install', '-vvv', '--find-links', 'http://example.com', 'INITools', expect_error=True)
-    assert "Analyzing links from page http://pypi.pinaxproject.com" in result.stdout
-    assert "Analyzing links from page http://example.com" in result.stdout
-
-
-def test_command_line_appends_correctly():
-    """
-    Test multiple appending options set by environmental variables.
-
-    """
-    environ = clear_environ(os.environ.copy())
-    environ['PIP_FIND_LINKS'] = 'http://pypi.pinaxproject.com http://example.com'
-    reset_env(environ)
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-
-    assert "Analyzing links from page http://pypi.pinaxproject.com" in result.stdout, result.stdout
-    assert "Analyzing links from page http://example.com" in result.stdout, result.stdout
-
-
-def test_config_file_override_stack():
-    """
-    Test config files (global, overriding a global config with a
-    local, overriding all with a command line flag).
-
-    """
-    fd, config_file = tempfile.mkstemp('-pip.cfg', 'test-')
-    try:
-        _test_config_file_override_stack(config_file)
-    finally:
-        # `os.close` is a workaround for a bug in subprocess
-        # http://bugs.python.org/issue3210
-        os.close(fd)
-        os.remove(config_file)
-
-
-def _test_config_file_override_stack(config_file):
-    environ = clear_environ(os.environ.copy())
-    environ['PIP_CONFIG_FILE'] = config_file # set this to make pip load it
-    reset_env(environ)
-    write_file(config_file, textwrap.dedent("""\
-        [global]
-        index-url = http://download.zope.org/ppix
-        """))
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "Getting page http://download.zope.org/ppix/INITools" in result.stdout
-    reset_env(environ)
-    write_file(config_file, textwrap.dedent("""\
-        [global]
-        index-url = http://download.zope.org/ppix
-        [install]
-        index-url = http://pypi.appspot.com/
-        """))
-    result = run_pip('install', '-vvv', 'INITools', expect_error=True)
-    assert "Getting page http://pypi.appspot.com/INITools" in result.stdout
-    result = run_pip('install', '-vvv', '--index-url', 'http://pypi.python.org/simple', 'INITools', expect_error=True)
-    assert "Getting page http://download.zope.org/ppix/INITools" not in result.stdout
-    assert "Getting page http://pypi.appspot.com/INITools" not in result.stdout
-    assert "Getting page http://pypi.python.org/simple/INITools" in result.stdout
-
-
-def test_log_file_no_directory():
-    """
-    Test opening a log file with no directory name.
-
-    """
-    from pip.basecommand import open_logfile
-    fp = open_logfile('testpip.log')
-    fp.write('can write')
-    fp.close()
-    assert os.path.exists(fp.name)
-    os.remove(fp.name)
diff --git a/vendor/pip-1.2.1/tests/test_download.py b/vendor/pip-1.2.1/tests/test_download.py
deleted file mode 100644
index 5d4923ccdedbd8da96e7043e9624b32773b14956..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_download.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from pip.backwardcompat import any
-
-import textwrap
-from tests.test_pip import reset_env, run_pip, write_file
-from tests.path import Path
-
-
-def test_download_if_requested():
-    """
-    It should download (in the scratch path) and not install if requested.
-    """
-
-    env = reset_env()
-    result = run_pip('install', 'INITools==0.1', '-d', '.', expect_error=True)
-    assert Path('scratch')/ 'INITools-0.1.tar.gz' in result.files_created
-    assert env.site_packages/ 'initools' not in result.files_created
-
-
-def test_single_download_from_requirements_file():
-    """
-    It should support downloading (into the scratch path) from PyPI via a requirements file
-    """
-
-    env = reset_env()
-    write_file('test-req.txt', textwrap.dedent("""
-        INITools==0.1
-        """))
-    result = run_pip('install', '-r', env.scratch_path/ 'test-req.txt', '-d', '.', expect_error=True)
-    assert Path('scratch')/ 'INITools-0.1.tar.gz' in result.files_created
-    assert env.site_packages/ 'initools' not in result.files_created
-
-
-def test_download_should_download_dependencies():
-    """
-    It should download dependencies (in the scratch path)
-    """
-
-    env = reset_env()
-    result = run_pip('install', 'Paste[openid]==1.7.5.1', '-d', '.', expect_error=True)
-    assert Path('scratch')/ 'Paste-1.7.5.1.tar.gz' in result.files_created
-    openid_tarball_prefix = str(Path('scratch')/ 'python-openid-')
-    assert any(path.startswith(openid_tarball_prefix) for path in result.files_created)
-    assert env.site_packages/ 'openid' not in result.files_created
diff --git a/vendor/pip-1.2.1/tests/test_extras.py b/vendor/pip-1.2.1/tests/test_extras.py
deleted file mode 100644
index 163893e877177cb1afcbfa0f328a21c16f01e5c0..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_extras.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from os.path import join
-
-from tests.test_pip import reset_env, run_pip
-
-
-def test_simple_extras_install_from_pypi():
-    """
-    Test installing a package from PyPI using extras dependency Paste[openid].
-    """
-    e = reset_env()
-    result = run_pip('install', 'Paste[openid]==1.7.5.1', expect_stderr=True)
-    openid_folder = e.site_packages / 'openid'
-    assert openid_folder in result.files_created, result.files_created
-
-
-def test_no_extras_uninstall():
-    """
-    No extras dependency gets uninstalled when the root package is uninstalled
-    """
-    env = reset_env()
-    result = run_pip('install', 'Paste[openid]==1.7.5.1', expect_stderr=True)
-    assert join(env.site_packages, 'paste') in result.files_created, sorted(result.files_created.keys())
-    assert join(env.site_packages, 'openid') in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('uninstall', 'Paste', '-y')
-    # openid should not be uninstalled
-    openid_folder = env.site_packages / 'openid'
-    assert openid_folder not in result2.files_deleted, result2.files_deleted
diff --git a/vendor/pip-1.2.1/tests/test_file_scheme_index.py b/vendor/pip-1.2.1/tests/test_file_scheme_index.py
deleted file mode 100644
index 38f6654b9244ec6c7036177a7b646108ae42a4d2..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_file_scheme_index.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from pip.backwardcompat import urllib
-from tests.test_pip import here, reset_env, run_pip, pyversion
-from tests.path import Path
-
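-# file:// URL for the local 'in dex' directory next to the tests; the space in the name exercises URL quoting.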
-index_url = 'file://' + urllib.quote(str(Path(here).abspath/'in dex').replace('\\', '/'))
-
-
-def test_install():
-    """
-    Test installing from a local index.
-
-    """
-    env = reset_env()
-    result = run_pip('install', '-vvv', '--index-url', index_url, 'FSPkg', expect_error=False)
-    assert (env.site_packages/'fspkg') in result.files_created, str(result.stdout)
-    assert (env.site_packages/'FSPkg-0.1dev-py%s.egg-info' % pyversion) in result.files_created, str(result)
diff --git a/vendor/pip-1.2.1/tests/test_finder.py b/vendor/pip-1.2.1/tests/test_finder.py
deleted file mode 100644
index 4c930ed816f30f32f42ebbdc36df139c71513b0b..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_finder.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from pip.backwardcompat import urllib
-
-from pip.req import InstallRequirement
-from pip.index import PackageFinder
-
-from tests.path import Path
-from tests.test_pip import here
-
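-# file:// URL for the local 'packages' directory, used as a --find-links source.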
-find_links = 'file://' + urllib.quote(str(Path(here).abspath/'packages').replace('\\', '/'))
-
-
-def test_no_mpkg():
-    """Finder skips zipfiles with "macosx10" in the name."""
-    finder = PackageFinder([find_links], [])
-    req = InstallRequirement.from_line("pkgwithmpkg")
-    found = finder.find_requirement(req, False)
-
-    assert found.url.endswith("pkgwithmpkg-1.0.tar.gz"), found
diff --git a/vendor/pip-1.2.1/tests/test_freeze.py b/vendor/pip-1.2.1/tests/test_freeze.py
deleted file mode 100644
index 4412c1435402d4da9c38a350ef5ea0b1f46e1083..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_freeze.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import sys
-import re
-import textwrap
-from doctest import OutputChecker, ELLIPSIS
-from tests.test_pip import reset_env, run_pip, write_file, get_env, pyversion
-from tests.local_repos import local_checkout, local_repo
-
-
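-# Matches a 'distribute==<version>' line so it can be stripped from freeze output.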
-distribute_re = re.compile('^distribute==[0-9.]+\n', re.MULTILINE)
-
-
-def _check_output(result, expected):
-    checker = OutputChecker()
-    actual = str(result)
-
-    ## FIXME!  The following is a TOTAL hack.  For some reason the
-    ## __str__ result for pkg_resources.Requirement gets downcased on
-    ## Windows.  Since INITools is the only package we're installing
-    ## in this file with funky case requirements, I'm forcibly
-    ## upcasing it.  You can also normalize everything to lowercase,
-    ## but then you have to remember to upcase <BLANKLINE>.  The right
-    ## thing to do in the end is probably to find out how to report
-    ## the proper fully-cased package name in our error message.
-    if sys.platform == 'win32':
-        actual = actual.replace('initools', 'INITools')
-
-    # This allows our existing tests to work when run in a context
-    # with distribute installed.
-    actual = distribute_re.sub('', actual)
-
-    def banner(msg):
-        return '\n========== %s ==========\n' % msg
-    assert checker.check_output(expected, actual, ELLIPSIS), banner('EXPECTED')+expected+banner('ACTUAL')+actual+banner(6*'=')
-
-
-def test_freeze_basic():
-    """
-    Some tests of freeze; first we have to install some stuff.  Note that
-    the test is a little crude at the end because Python 2.5+ adds egg
-    info to the standard library, so packages like wsgiref will show up in
-    the freeze output.  (Probably that should be accounted for in pip, but
-    currently it is not.)
-
-    """
-    env = reset_env()
-    write_file('initools-req.txt', textwrap.dedent("""\
-        INITools==0.2
-        # and something else to test out:
-        MarkupSafe<=0.12
-        """))
-    result = run_pip('install', '-r', env.scratch_path/'initools-req.txt')
-    result = run_pip('freeze', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: pip freeze
-        -- stdout: --------------------
-        INITools==0.2
-        MarkupSafe==0.12...
-        <BLANKLINE>""")
-    _check_output(result, expected)
-
-
-def test_freeze_svn():
-    """Now lets try it with an svn checkout"""
-    env = reset_env()
-    result = env.run('svn', 'co', '-r10',
-                     local_repo('svn+http://svn.colorstudy.com/INITools/trunk'),
-                     'initools-trunk')
-    result = env.run('python', 'setup.py', 'develop',
-            cwd=env.scratch_path/ 'initools-trunk', expect_stderr=True)
-    result = run_pip('freeze', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze
-        -- stdout: --------------------
-        -e %s@10#egg=INITools-0.3.1dev...-dev_r10
-        ...""" % local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'))
-    _check_output(result, expected)
-
-
-def test_freeze_git_clone():
-    """
-    Test freezing a Git clone.
-
-    """
-    env = reset_env()
-    result = env.run('git', 'clone', local_repo('git+http://github.com/pypa/pip-test-package.git'), 'pip-test-package')
-    result = env.run('git', 'checkout', '7d654e66c8fa7149c165ddeffa5b56bc06619458',
-            cwd=env.scratch_path / 'pip-test-package', expect_stderr=True)
-    result = env.run('python', 'setup.py', 'develop',
-            cwd=env.scratch_path / 'pip-test-package')
-    result = run_pip('freeze', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze
-        -- stdout: --------------------
-        -e %s@...#egg=pip_test_package-...
-        ...""" % local_checkout('git+http://github.com/pypa/pip-test-package.git'))
-    _check_output(result, expected)
-
-    result = run_pip('freeze', '-f',
-                     '%s#egg=pip_test_package' % local_checkout('git+http://github.com/pypa/pip-test-package.git'),
-                     expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: pip freeze -f %(repo)s#egg=pip_test_package
-        -- stdout: --------------------
-        -f %(repo)s#egg=pip_test_package
-        -e %(repo)s@...#egg=pip_test_package-dev
-        ...""" % {'repo': local_checkout('git+http://github.com/pypa/pip-test-package.git')})
-    _check_output(result, expected)
-
-
-def test_freeze_mercurial_clone():
-    """
-    Test freezing a Mercurial clone.
-
-    """
-    reset_env()
-    env = get_env()
-    result = env.run('hg', 'clone',
-                     '-r', '7bc186caa7dc',
-                     local_repo('hg+http://bitbucket.org/jezdez/django-authority'),
-                     'django-authority')
-    result = env.run('python', 'setup.py', 'develop',
-            cwd=env.scratch_path/'django-authority', expect_stderr=True)
-    result = run_pip('freeze', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze
-        -- stdout: --------------------
-        -e %s@...#egg=django_authority-...
-        ...""" % local_checkout('hg+http://bitbucket.org/jezdez/django-authority'))
-    _check_output(result, expected)
-
-    result = run_pip('freeze', '-f',
-                     '%s#egg=django_authority' % local_checkout('hg+http://bitbucket.org/jezdez/django-authority'),
-                     expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze -f %(repo)s#egg=django_authority
-        -- stdout: --------------------
-        -f %(repo)s#egg=django_authority
-        -e %(repo)s@...#egg=django_authority-dev
-        ...""" % {'repo': local_checkout('hg+http://bitbucket.org/jezdez/django-authority')})
-    _check_output(result, expected)
-
-
-def test_freeze_bazaar_clone():
-    """
-    Test freezing a Bazaar clone.
-
-    """
-    reset_env()
-    env = get_env()
-    result = env.run('bzr', 'checkout', '-r', '174',
-                     local_repo('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'),
-                     'django-wikiapp')
-    result = env.run('python', 'setup.py', 'develop',
-            cwd=env.scratch_path/'django-wikiapp')
-    result = run_pip('freeze', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze
-        -- stdout: --------------------
-        -e %s@...#egg=django_wikiapp-...
-        ...""" % local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'))
-    _check_output(result, expected)
-
-    result = run_pip('freeze', '-f',
-                     '%s/#egg=django-wikiapp' %
-                     local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1'),
-                     expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze -f %(repo)s/#egg=django-wikiapp
-        -- stdout: --------------------
-        -f %(repo)s/#egg=django-wikiapp
-        -e %(repo)s@...#egg=django_wikiapp-...
-        ...""" % {'repo':
-                  local_checkout('bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp/release-0.1')})
-    _check_output(result, expected)
-
-
-def test_freeze_with_local_option():
-    """
-    Test that wsgiref (from global site-packages) is reported normally, but not with --local.
-
-    """
-    reset_env()
-    result = run_pip('install', 'initools==0.2')
-    result = run_pip('freeze', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze
-        -- stdout: --------------------
-        INITools==0.2
-        wsgiref==...
-        <BLANKLINE>""")
-
-    # The following check is broken (see
-    # http://bitbucket.org/ianb/pip/issue/110).  For now we are simply
-    # neutering this test, but if we can't find a way to fix it,
-    # this whole function should be removed.
-
-    # _check_output(result, expected)
-
-    result = run_pip('freeze', '--local', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: ...pip freeze --local
-        -- stdout: --------------------
-        INITools==0.2
-        <BLANKLINE>""")
-    _check_output(result, expected)
-
-
-def test_freeze_with_requirement_option():
-    """
-    Test that new requirements are created correctly with --requirement hints
-
-    """
-    reset_env()
-    ignores = textwrap.dedent("""\
-        # Unchanged requirements below this line
-        -r ignore.txt
-        --requirement ignore.txt
-        -Z ignore
-        --always-unzip ignore
-        -f http://ignore
-        -i http://ignore
-        --extra-index-url http://ignore
-        --find-links http://ignore
-        --index-url http://ignore
-        """)
-    write_file('hint.txt', textwrap.dedent("""\
-        INITools==0.1
-        NoExist==4.2
-        """) + ignores)
-    result = run_pip('install', 'initools==0.2')
-    result = run_pip('install', 'MarkupSafe')
-    result = run_pip('freeze', '--requirement', 'hint.txt', expect_stderr=True)
-    expected = textwrap.dedent("""\
-        Script result: pip freeze --requirement hint.txt
-        -- stderr: --------------------
-        Requirement file contains NoExist==4.2, but that package is not installed
-
-        -- stdout: --------------------
-        INITools==0.2
-        """) + ignores + "## The following requirements were added by pip --freeze:..."
-    _check_output(result, expected)
diff --git a/vendor/pip-1.2.1/tests/test_help.py b/vendor/pip-1.2.1/tests/test_help.py
deleted file mode 100644
index e638963e77a1c41327339c0231840e4feecf15bf..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_help.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from pip.exceptions import CommandError
-from pip.commands.help import (HelpCommand,
-                               SUCCESS,
-                               ERROR,)
-from mock import Mock
-from nose.tools import assert_raises
-from tests.test_pip import run_pip, reset_env
-
-
-def test_run_method_should_return_sucess_when_finds_command_name():
-    """
-    Test HelpCommand.run for existing command
-    """
-    options_mock = Mock()
-    args = ('freeze',)
-    help_cmd = HelpCommand()
-    status = help_cmd.run(options_mock, args)
-    assert status == SUCCESS
-
-
-def test_run_method_should_return_sucess_when_command_name_not_specified():
-    """
-    Test HelpCommand.run when there are no args
-    """
-    options_mock = Mock()
-    args = ()
-    help_cmd = HelpCommand()
-    status = help_cmd.run(options_mock, args)
-    assert status == SUCCESS
-
-
-def test_run_method_should_raise_command_error_when_command_does_not_exist():
-    """
-    Test HelpCommand.run for non-existing command
-    """
-    options_mock = Mock()
-    args = ('mycommand',)
-    help_cmd = HelpCommand()
-    assert_raises(CommandError, help_cmd.run, options_mock, args)
-
-
-def test_help_command_should_exit_status_ok_when_command_exists():
-    """
-    Test `help` command for existing command
-    """
-    reset_env()
-    result = run_pip('help', 'freeze')
-    assert result.returncode == SUCCESS
-
-
-def test_help_command_should_exit_status_ok_when_no_command_is_specified():
-    """
-    Test `help` command for no command
-    """
-    reset_env()
-    result = run_pip('help')
-    assert result.returncode == SUCCESS
-
-
-def test_help_command_should_exit_status_error_when_command_does_not_exist():
-    """
-    Test `help` command for non-existing command
-    """
-    reset_env()
-    result = run_pip('help', 'mycommand', expect_error=True)
-    assert result.returncode == ERROR
diff --git a/vendor/pip-1.2.1/tests/test_index.py b/vendor/pip-1.2.1/tests/test_index.py
deleted file mode 100644
index 6f9d216dc05c86f8375bf49fb828ae7fb929ad71..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_index.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from pip.index import package_to_requirement, HTMLPage
-
-
-def test_package_name_should_be_converted_to_requirement():
-    """
-    Test that it translates a name like Foo-1.2 to Foo==1.2
-    """
-    assert package_to_requirement('Foo-1.2') == 'Foo==1.2'
-    assert package_to_requirement('Foo-dev') == 'Foo==dev'
-    assert package_to_requirement('Foo') == 'Foo'
-
-
-def test_html_page_should_be_able_to_scrap_rel_links():
-    """
-    Test scraping a page, looking for the URL in an href attribute
-    """
-    page = HTMLPage("""
-        <!-- The <th> elements below are a terrible terrible hack for setuptools -->
-        <li>
-        <strong>Home Page:</strong>
-        <!-- <th>Home Page -->
-        <a href="http://supervisord.org/">http://supervisord.org/</a>
-        </li>""", "supervisor")
-
-    links = list(page.scraped_rel_links())
-    assert len(links) == 1
-    assert links[0].url == 'http://supervisord.org/'
-
diff --git a/vendor/pip-1.2.1/tests/test_pip.py b/vendor/pip-1.2.1/tests/test_pip.py
deleted file mode 100644
index 17e8f6616d6d8486caabdb42ef94a877e1c51ad4..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_pip.py
+++ /dev/null
@@ -1,618 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-import tempfile
-import shutil
-import glob
-import atexit
-import textwrap
-import site
-
-from scripttest import TestFileEnvironment, FoundDir
-from tests.path import Path, curdir, u
-from pip.util import rmtree
-
-pyversion = sys.version[:3]
-
-# the directory containing all the tests
-here = Path(__file__).abspath.folder
-
-# the root of this pip source distribution
-src_folder = here.folder
-download_cache = tempfile.mkdtemp(prefix='pip-test-cache')
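-# portion of site.USER_SITE below site.USER_BASE (e.g. 'lib/pythonX.Y/site-packages' on most POSIX systems)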
-site_packages_suffix = site.USER_SITE[len(site.USER_BASE) + 1:]
-
-
-def path_to_url(path):
-    """
-    Convert a path to a URI. The path will be made absolute and
-    will not have quoted path parts.
-    (adapted from pip.util)
-    """
-    path = os.path.normpath(os.path.abspath(path))
-    drive, path = os.path.splitdrive(path)
-    filepath = path.split(os.path.sep)
-    url = '/'.join(filepath)
-    if drive:
-        return 'file:///' + drive + url
-    return 'file://' +url
-
-
-def demand_dirs(path):
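-    """Create ``path`` (including parents) if it does not already exist."""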
-    if not os.path.exists(path):
-        os.makedirs(path)
-
-
-# Tweak the path so we can find up-to-date pip sources
-# (http://bitbucket.org/ianb/pip/issue/98)
-sys.path = [src_folder] + sys.path
-
-
-def create_virtualenv(where, distribute=False):
-    import virtualenv
-    if sys.version_info[0] > 2:
-        distribute = True
-    virtualenv.create_environment(
-        where, use_distribute=distribute, unzip_setuptools=True)
-
-    return virtualenv.path_locations(where)
-
-
-def relpath(root, other):
-    """a poor man's os.path.relpath, since we may not have Python 2.6"""
-    prefix = root+Path.sep
-    assert other.startswith(prefix)
-    return Path(other[len(prefix):])
-
-if 'PYTHONPATH' in os.environ:
-    del os.environ['PYTHONPATH']
-
-
-try:
-    any
-except NameError:
-
-    def any(seq):
-        for item in seq:
-            if item:
-                return True
-        return False
-
-
-def clear_environ(environ):
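-    """Return a copy of ``environ`` with any PIP_* variables removed."""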
-    return dict(((k, v) for k, v in environ.items()
-                if not k.lower().startswith('pip_')))
-
-
-def install_setuptools(env):
-    easy_install = os.path.join(env.bin_path, 'easy_install')
-    version = 'setuptools==0.6c11'
-    if sys.platform != 'win32':
-        return env.run(easy_install, version)
-
-    tempdir = tempfile.mkdtemp()
-    try:
-        for f in glob.glob(easy_install+'*'):
-            shutil.copy2(f, tempdir)
-        return env.run(os.path.join(tempdir, 'easy_install'), version)
-    finally:
-        rmtree(tempdir)
-
-
-env = None
-
-
-def reset_env(environ=None, use_distribute=None):
-    global env
-    # FastTestPipEnv reuses env, not safe if use_distribute specified
-    if use_distribute is None:
-        env = FastTestPipEnvironment(environ)
-    else:
-        env = TestPipEnvironment(environ, use_distribute=use_distribute)
-    return env
-
-
-class TestFailure(AssertionError):
-    """
-
-    An "assertion" failed during testing.
-
-    """
-    pass
-
-
-#
-# This cleanup routine prevents the __del__ method that cleans up the tree of
-# the last TestPipEnvironment from firing after shutil has already been
-# unloaded.  It also ensures that FastTestPipEnvironment doesn't leave an
-# environment hanging around that might confuse the next test run.
-#
-def _cleanup():
-    global env
-    del env
-    rmtree(download_cache, ignore_errors=True)
-    rmtree(fast_test_env_root, ignore_errors=True)
-    rmtree(fast_test_env_backup, ignore_errors=True)
-
-atexit.register(_cleanup)
-
-
-class TestPipResult(object):
-
-    def __init__(self, impl, verbose=False):
-        self._impl = impl
-
-        if verbose:
-            print(self.stdout)
-            if self.stderr:
-                print('======= stderr ========')
-                print(self.stderr)
-                print('=======================')
-
-    def __getattr__(self, attr):
-        return getattr(self._impl, attr)
-
-    if sys.platform == 'win32':
-
-        @property
-        def stdout(self):
-            return self._impl.stdout.replace('\r\n', '\n')
-
-        @property
-        def stderr(self):
-            return self._impl.stderr.replace('\r\n', '\n')
-
-        def __str__(self):
-            return str(self._impl).replace('\r\n', '\n')
-    else:
-        # Python doesn't automatically forward __str__ through __getattr__
-
-        def __str__(self):
-            return str(self._impl)
-
-    def assert_installed(self, pkg_name, with_files=[], without_files=[], without_egg_link=False, use_user_site=False):
-        e = self.test_env
-
-        pkg_dir = e.venv/ 'src'/ pkg_name.lower()
-
-        if use_user_site:
-            egg_link_path = e.user_site / pkg_name + '.egg-link'
-        else:
-            egg_link_path = e.site_packages / pkg_name + '.egg-link'
-        if without_egg_link:
-            if egg_link_path in self.files_created:
-                raise TestFailure('unexpected egg link file created: '\
-                                  '%r\n%s' % (egg_link_path, self))
-        else:
-            if not egg_link_path in self.files_created:
-                raise TestFailure('expected egg link file missing: '\
-                                  '%r\n%s' % (egg_link_path, self))
-
-            egg_link_file = self.files_created[egg_link_path]
-
-            if not (# FIXME: I don't understand why there's a trailing . here
-                    egg_link_file.bytes.endswith('.')
-                and egg_link_file.bytes[:-1].strip().endswith(pkg_dir)):
-                raise TestFailure(textwrap.dedent(u('''\
-                Incorrect egg_link file %r
-                Expected ending: %r
-                ------- Actual contents -------
-                %s
-                -------------------------------''' % (
-                        egg_link_file,
-                        pkg_dir + u('\n.'),
-                        egg_link_file.bytes))))
-
-        if use_user_site:
-            pth_file = Path.string(e.user_site / 'easy-install.pth')
-        else:
-            pth_file = Path.string(e.site_packages / 'easy-install.pth')
-
-        if (pth_file in self.files_updated) == without_egg_link:
-            raise TestFailure('%r unexpectedly %supdated by install' % (
-                pth_file, (not without_egg_link and 'not ' or '')))
-
-        if (pkg_dir in self.files_created) == (curdir in without_files):
-            raise TestFailure(textwrap.dedent('''\
-            expected package directory %r %sto be created
-            actually created:
-            %s
-            ''') % (
-                Path.string(pkg_dir),
-                (curdir in without_files and 'not ' or ''),
-                sorted(self.files_created.keys())))
-
-        for f in with_files:
-            if not (pkg_dir/f).normpath in self.files_created:
-                raise TestFailure('Package directory %r missing '\
-                                  'expected content %r' % (pkg_dir, f))
-
-        for f in without_files:
-            if (pkg_dir/f).normpath in self.files_created:
-                raise TestFailure('Package directory %r has '\
-                                  'unexpected content %r' % (pkg_dir, f))
-
-
-class TestPipEnvironment(TestFileEnvironment):
-    """A specialized TestFileEnvironment for testing pip"""
-
-    #
-    # Attribute naming convention
-    # ---------------------------
-    #
-    # Instances of this class have many attributes representing paths
-    # in the filesystem.  To keep things straight, absolute paths have
-    # a name of the form xxxx_path and relative paths have a name that
-    # does not end in '_path'.
-
-    # The following paths are relative to the root_path, and should be
-    # treated by clients as instance attributes.  The fact that they
-    # are defined in the class is an implementation detail
-
-    # where we'll create the virtual Python installation for testing
-    #
-    # Named with a leading dot to reduce the chance of spurious
-    # results due to being mistaken for the virtualenv package.
-    venv = Path('.virtualenv')
-
-    # The root of a directory tree to be used arbitrarily by tests
-    scratch = Path('scratch')
-
-    exe = sys.platform == 'win32' and '.exe' or ''
-
-    verbose = False
-
-    def __init__(self, environ=None, use_distribute=None):
-
-        self.root_path = Path(tempfile.mkdtemp('-piptest'))
-
-        # We will set up a virtual environment at root_path.
-        self.scratch_path = self.root_path / self.scratch
-
-        self.venv_path = self.root_path / self.venv
-
-        if not environ:
-            environ = os.environ.copy()
-            environ = clear_environ(environ)
-            environ['PIP_DOWNLOAD_CACHE'] = str(download_cache)
-
-        environ['PIP_NO_INPUT'] = '1'
-        environ['PIP_LOG_FILE'] = str(self.root_path/'pip-log.txt')
-
-        super(TestPipEnvironment, self).__init__(
-            self.root_path, ignore_hidden=False,
-            environ=environ, split_cmd=False, start_clear=False,
-            cwd=self.scratch_path, capture_temp=True, assert_no_temp=True)
-
-        demand_dirs(self.venv_path)
-        demand_dirs(self.scratch_path)
-
-        if use_distribute is None:
-            use_distribute = os.environ.get('PIP_TEST_USE_DISTRIBUTE', False)
-        self.use_distribute = use_distribute
-
-        # Create a virtualenv and remember where it's putting things.
-        virtualenv_paths = create_virtualenv(self.venv_path, distribute=self.use_distribute)
-
-        assert self.venv_path == virtualenv_paths[0] # sanity check
-
-        for id, path in zip(('venv', 'lib', 'include', 'bin'), virtualenv_paths):
-            setattr(self, id+'_path', Path(path))
-            setattr(self, id, relpath(self.root_path, path))
-
-        assert self.venv == TestPipEnvironment.venv # sanity check
-
-        self.site_packages = self.lib/'site-packages'
-        self.user_base_path = self.venv_path/'user'
-        self.user_site_path = self.venv_path/'user'/site_packages_suffix
-
-        self.user_site = relpath(self.root_path, self.user_site_path)
-        demand_dirs(self.user_site_path)
-        self.environ["PYTHONUSERBASE"] = self.user_base_path
-
-        # create easy-install.pth in user_site, so we always have it updated instead of created
-        open(self.user_site_path/'easy-install.pth', 'w').close()
-
-        # put the test-scratch virtualenv's bin dir first on the PATH
-        self.environ['PATH'] = Path.pathsep.join((self.bin_path, self.environ['PATH']))
-
-        # test that test-scratch virtualenv creation produced sensible venv python
-        result = self.run('python', '-c', 'import sys; print(sys.executable)')
-        pythonbin = result.stdout.strip()
-
-        if Path(pythonbin).noext != self.bin_path/'python':
-            raise RuntimeError(
-                "Oops! 'python' in our test environment runs %r"
-                " rather than expected %r" % (pythonbin, self.bin_path/'python'))
-
-        # make sure we have current setuptools to avoid svn incompatibilities
-        if not self.use_distribute:
-            install_setuptools(self)
-
-        # Uninstall whatever version of pip came with the virtualenv.
-        # Earlier versions of pip were incapable of
-        # self-uninstallation on Windows, so we use the one we're testing.
-        self.run('python', '-c',
-                 '"import sys; sys.path.insert(0, %r); import pip; sys.exit(pip.main());"' % os.path.dirname(here),
-                 'uninstall', '-vvv', '-y', 'pip')
-
-        # Install this version instead
-        self.run('python', 'setup.py', 'install', cwd=src_folder, expect_stderr=True)
-        self._use_cached_pypi_server()
-
-    def _ignore_file(self, fn):
-        if fn.endswith('__pycache__') or fn.endswith(".pyc"):
-            result = True
-        else:
-            result = super(TestPipEnvironment, self)._ignore_file(fn)
-        return result
-
-    def run(self, *args, **kw):
-        if self.verbose:
-            print('>> running %s %s' % (args, kw))
-        cwd = kw.pop('cwd', None)
-        run_from = kw.pop('run_from', None)
-        assert not cwd or not run_from, "Don't use run_from; it's going away"
-        cwd = Path.string(cwd or run_from or self.cwd)
-        assert not isinstance(cwd, Path)
-        return TestPipResult(super(TestPipEnvironment, self).run(cwd=cwd, *args, **kw), verbose=self.verbose)
-
-    def __del__(self):
-        rmtree(str(self.root_path), ignore_errors=True)
-
-    def _use_cached_pypi_server(self):
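-        """Write a .pth file into site-packages that imports ``pypi_server`` and
-        calls ``PyPIProxy.setup()``, so pip in this venv uses the tests' cached PyPI proxy."""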
-        site_packages = self.root_path / self.site_packages
-        pth = open(os.path.join(site_packages, 'pypi_intercept.pth'), 'w')
-        pth.write('import sys; ')
-        pth.write('sys.path.insert(0, %r); ' % str(here))
-        pth.write('import pypi_server; pypi_server.PyPIProxy.setup(); ')
-        pth.write('sys.path.remove(%r); ' % str(here))
-        pth.close()
-
-
-fast_test_env_root = here / 'tests_cache' / 'test_ws'
-fast_test_env_backup = here / 'tests_cache' / 'test_ws_backup'
-
-
-class FastTestPipEnvironment(TestPipEnvironment):
-    def __init__(self, environ=None):
-        import virtualenv
-
-        self.root_path = fast_test_env_root
-        self.backup_path = fast_test_env_backup
-
-        self.scratch_path = self.root_path / self.scratch
-
-        # We will set up a virtual environment at root_path.
-        self.venv_path = self.root_path / self.venv
-
-        if not environ:
-            environ = os.environ.copy()
-            environ = clear_environ(environ)
-            environ['PIP_DOWNLOAD_CACHE'] = str(download_cache)
-
-        environ['PIP_NO_INPUT'] = '1'
-        environ['PIP_LOG_FILE'] = str(self.root_path/'pip-log.txt')
-
-        TestFileEnvironment.__init__(self,
-            self.root_path, ignore_hidden=False,
-            environ=environ, split_cmd=False, start_clear=False,
-            cwd=self.scratch_path, capture_temp=True, assert_no_temp=True)
-
-        virtualenv_paths = virtualenv.path_locations(self.venv_path)
-
-        for id, path in zip(('venv', 'lib', 'include', 'bin'), virtualenv_paths):
-            setattr(self, id+'_path', Path(path))
-            setattr(self, id, relpath(self.root_path, path))
-
-        assert self.venv == TestPipEnvironment.venv # sanity check
-
-        self.site_packages = self.lib/'site-packages'
-        self.user_base_path = self.venv_path/'user'
-        self.user_site_path = self.venv_path/'user'/'lib'/self.lib.name/'site-packages'
-
-        self.user_site = relpath(self.root_path, self.user_site_path)
-
-        self.environ["PYTHONUSERBASE"] = self.user_base_path
-
-        # put the test-scratch virtualenv's bin dir first on the PATH
-        self.environ['PATH'] = Path.pathsep.join((self.bin_path, self.environ['PATH']))
-
-        self.use_distribute = os.environ.get('PIP_TEST_USE_DISTRIBUTE', False)
-
-        if self.root_path.exists:
-            rmtree(self.root_path)
-        if self.backup_path.exists:
-            shutil.copytree(self.backup_path, self.root_path, True)
-        else:
-            demand_dirs(self.venv_path)
-            demand_dirs(self.scratch_path)
-
-            # Create a virtualenv and remember where it's putting things.
-            create_virtualenv(self.venv_path, distribute=self.use_distribute)
-
-            demand_dirs(self.user_site_path)
-
-            # create easy-install.pth in user_site, so we always have it updated instead of created
-            open(self.user_site_path/'easy-install.pth', 'w').close()
-
-            # test that test-scratch virtualenv creation produced sensible venv python
-            result = self.run('python', '-c', 'import sys; print(sys.executable)')
-            pythonbin = result.stdout.strip()
-
-            if Path(pythonbin).noext != self.bin_path/'python':
-                raise RuntimeError(
-                    "Oops! 'python' in our test environment runs %r"
-                    " rather than expected %r" % (pythonbin, self.bin_path/'python'))
-
-            # make sure we have current setuptools to avoid svn incompatibilities
-            if not self.use_distribute:
-                install_setuptools(self)
-
-            # Uninstall whatever version of pip came with the virtualenv.
-            # Earlier versions of pip were incapable of
-            # self-uninstallation on Windows, so we use the one we're testing.
-            self.run('python', '-c',
-                     '"import sys; sys.path.insert(0, %r); import pip; sys.exit(pip.main());"' % os.path.dirname(here),
-                     'uninstall', '-vvv', '-y', 'pip')
-
-            # Install this version instead
-            self.run('python', 'setup.py', 'install', cwd=src_folder, expect_stderr=True)
-            shutil.copytree(self.root_path, self.backup_path, True)
-        self._use_cached_pypi_server()
-        assert self.root_path.exists
-
-    def __del__(self):
-        pass # shutil.rmtree(str(self.root_path), ignore_errors=True)
-
-
-def run_pip(*args, **kw):
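-    """Run pip in the test environment, dropping directory-only updates
-    (typically .pyc/__pycache__ churn) from the result."""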
-    result = env.run('pip', *args, **kw)
-    ignore = []
-    for path, f in result.files_before.items():
-        # ignore updated directories, often due to .pyc or __pycache__
-        if (path in result.files_updated and
-            isinstance(result.files_updated[path], FoundDir)):
-            ignore.append(path)
-    for path in ignore:
-        del result.files_updated[path]
-    return result
-
-
-def write_file(filename, text, dest=None):
-    """Write a file in the dest (default=env.scratch_path)
-
-    """
-    env = get_env()
-    if dest:
-        complete_path = dest/ filename
-    else:
-        complete_path = env.scratch_path/ filename
-    f = open(complete_path, 'w')
-    f.write(text)
-    f.close()
-
-
-def mkdir(dirname):
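-    """Create ``dirname`` inside the current test environment's scratch path."""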
-    os.mkdir(os.path.join(get_env().scratch_path, dirname))
-
-
-def get_env():
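-    """Return the shared test environment, creating one via reset_env() if needed."""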
-    if env is None:
-        reset_env()
-    return env
-
-
-# FIXME ScriptTest does something similar, but only within a single
-# ProcResult; this generalizes it so states can be compared across
-# multiple commands.  Maybe should be rolled into ScriptTest?
-def diff_states(start, end, ignore=None):
-    """
-    Differences two "filesystem states" as represented by dictionaries
-    of FoundFile and FoundDir objects.
-
-    Returns a dictionary with following keys:
-
-    ``deleted``
-        Dictionary of files/directories found only in the start state.
-
-    ``created``
-        Dictionary of files/directories found only in the end state.
-
-    ``updated``
-        Dictionary of files whose size has changed (FIXME not entirely
-        reliable, but comparing contents is not possible because
-        FoundFile.bytes is lazy, and comparing mtime doesn't help if
-        we want to know if a file has been returned to its earlier
-        state).
-
-    Ignores mtime and other file attributes; only presence/absence and
-    size are considered.
-
-    """
-    ignore = ignore or []
-
-    def prefix_match(path, prefix):
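-        """Return True if ``path`` equals ``prefix`` or lies beneath it."""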
-        if path == prefix:
-            return True
-        prefix = prefix.rstrip(os.path.sep) + os.path.sep
-        return path.startswith(prefix)
-
-    start_keys = set([k for k in start.keys()
-                      if not any([prefix_match(k, i) for i in ignore])])
-    end_keys = set([k for k in end.keys()
-                    if not any([prefix_match(k, i) for i in ignore])])
-    deleted = dict([(k, start[k]) for k in start_keys.difference(end_keys)])
-    created = dict([(k, end[k]) for k in end_keys.difference(start_keys)])
-    updated = {}
-    for k in start_keys.intersection(end_keys):
-        if (start[k].size != end[k].size):
-            updated[k] = end[k]
-    return dict(deleted=deleted, created=created, updated=updated)
-
-
-def assert_all_changes(start_state, end_state, expected_changes):
-    """
-    Fails if anything changed that isn't listed in the
-    expected_changes.
-
-    start_state is either a dict mapping paths to
-    scripttest.[FoundFile|FoundDir] objects or a TestPipResult whose
-    files_before we'll test.  end_state is either a similar dict or a
-    TestPipResult whose files_after we'll test.
-
-    Note: listing a directory means anything below
-    that directory can be expected to have changed.
-    """
-    start_files = start_state
-    end_files = end_state
-    if isinstance(start_state, TestPipResult):
-        start_files = start_state.files_before
-    if isinstance(end_state, TestPipResult):
-        end_files = end_state.files_after
-
-    diff = diff_states(start_files, end_files, ignore=expected_changes)
-    if list(diff.values()) != [{}, {}, {}]:
-        raise TestFailure('Unexpected changes:\n' + '\n'.join(
-            [k + ': ' + ', '.join(v.keys()) for k, v in diff.items()]))
-
-    # Don't throw away this potentially useful information
-    return diff
-
-
-def _create_test_package(env):
-    mkdir('version_pkg')
-    version_pkg_path = env.scratch_path/'version_pkg'
-    write_file('version_pkg.py', textwrap.dedent('''\
-                                def main():
-                                    print('0.1')
-                                '''), version_pkg_path)
-    write_file('setup.py', textwrap.dedent('''\
-                        from setuptools import setup, find_packages
-                        setup(name='version_pkg',
-                              version='0.1',
-                              packages=find_packages(),
-                              py_modules=['version_pkg'],
-                              entry_points=dict(console_scripts=['version_pkg=version_pkg:main']))
-                        '''), version_pkg_path)
-    env.run('git', 'init', cwd=version_pkg_path)
-    env.run('git', 'add', '.', cwd=version_pkg_path)
-    env.run('git', 'commit', '-q',
-            '--author', 'Pip <python-virtualenv@googlegroups.com>',
-            '-am', 'initial version', cwd=version_pkg_path)
-    return version_pkg_path
-
-
-def _change_test_package_version(env, version_pkg_path):
-    write_file('version_pkg.py', textwrap.dedent('''\
-        def main():
-            print("some different version")'''), version_pkg_path)
-    env.run('git', 'commit', '-q',
-            '--author', 'Pip <python-virtualenv@googlegroups.com>',
-            '-am', 'messed version',
-            cwd=version_pkg_path, expect_stderr=True)
-
-
-if __name__ == '__main__':
-    sys.stderr.write("Run pip's tests using nosetests. Requires virtualenv, ScriptTest, and nose.\n")
-    sys.exit(1)
diff --git a/vendor/pip-1.2.1/tests/test_proxy.py b/vendor/pip-1.2.1/tests/test_proxy.py
deleted file mode 100644
index fe6e551bddef9c5446ca32aa1c1ca7385c2b00bf..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_proxy.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""
-Tests for the proxy support in pip.
-
-TODO shouldn't need to hack sys.path in here.
-
-"""
-
-import os
-import sys
-sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
-
-import pip
-import getpass
-from pip.basecommand import get_proxy
-from tests.test_pip import here
-
-
-def new_getpass(prompt, answer='passwd'):
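-    """Stand-in for getpass.getpass that echoes the prompt and returns a canned answer."""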
-    print('%s%s' % (prompt, answer))
-    return answer
-
-
-def test_correct_pip_version():
-    """
-    Check we are importing pip from the right place.
-
-    """
-    base = os.path.dirname(here)
-    assert pip.__file__.startswith(base), pip.__file__
-
-
-def test_remove_proxy():
-    """
-    Test removing proxy from environ.
-
-    """
-    if 'HTTP_PROXY' in os.environ:
-        del os.environ['HTTP_PROXY']
-    assert get_proxy() == None
-    os.environ['HTTP_PROXY'] = 'user:pwd@server.com:port'
-    assert get_proxy() == 'user:pwd@server.com:port'
-    del os.environ['HTTP_PROXY']
-    assert get_proxy('server.com') == 'server.com'
-    assert get_proxy('server.com:80') == 'server.com:80'
-    assert get_proxy('user:passwd@server.com:3128') == 'user:passwd@server.com:3128'
-
-
-def test_get_proxy():
-    """
-    Test get_proxy returns correct proxy info.
-
-    """
-    # monkeypatch getpass.getpass, to avoid asking for a password
-    old_getpass = getpass.getpass
-    getpass.getpass = new_getpass
-
-    # Test it:
-    assert get_proxy('user:@server.com:3128') == 'user:@server.com:3128'
-    assert get_proxy('user@server.com:3128') == 'user:passwd@server.com:3128'
-
-    # Undo monkeypatch
-    getpass.getpass = old_getpass
-
diff --git a/vendor/pip-1.2.1/tests/test_requirements.py b/vendor/pip-1.2.1/tests/test_requirements.py
deleted file mode 100644
index 59e1347c34ad77a94f923da0758450bf254207a5..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_requirements.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import os.path
-import textwrap
-from pip.backwardcompat import urllib
-from pip.req import Requirements
-from tests.test_pip import reset_env, run_pip, write_file, pyversion, here, path_to_url
-from tests.local_repos import local_checkout
-from tests.path import Path
-
-
-def test_requirements_file():
-    """
-    Test installing from a requirements file.
-
-    """
-    other_lib_name, other_lib_version = 'anyjson', '0.3'
-    env = reset_env()
-    write_file('initools-req.txt', textwrap.dedent("""\
-        INITools==0.2
-        # and something else to test out:
-        %s<=%s
-        """ % (other_lib_name, other_lib_version)))
-    result = run_pip('install', '-r', env.scratch_path / 'initools-req.txt')
-    assert env.site_packages/'INITools-0.2-py%s.egg-info' % pyversion in result.files_created
-    assert env.site_packages/'initools' in result.files_created
-    assert result.files_created[env.site_packages/other_lib_name].dir
-    fn = '%s-%s-py%s.egg-info' % (other_lib_name, other_lib_version, pyversion)
-    assert result.files_created[env.site_packages/fn].dir
-
-
-def test_relative_requirements_file():
-    """
-    Test installing from a requirements file that uses a relative path with an ``#egg=`` fragment.
-
-    """
-    url = path_to_url(os.path.join(here, 'packages', '..', 'packages', 'FSPkg')) + '#egg=FSPkg'
-    env = reset_env()
-    write_file('file-egg-req.txt', textwrap.dedent("""\
-        %s
-        """ % url))
-    result = run_pip('install', '-vvv', '-r', env.scratch_path / 'file-egg-req.txt')
-    assert (env.site_packages/'FSPkg-0.1dev-py%s.egg-info' % pyversion) in result.files_created, str(result)
-    assert (env.site_packages/'fspkg') in result.files_created, str(result.stdout)
-
-
-def test_multiple_requirements_files():
-    """
-    Test installing from multiple nested requirements files.
-
-    """
-    other_lib_name, other_lib_version = 'anyjson', '0.3'
-    env = reset_env()
-    write_file('initools-req.txt', textwrap.dedent("""\
-        -e %s@10#egg=INITools-dev
-        -r %s-req.txt""" % (local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'),
-                            other_lib_name)))
-    write_file('%s-req.txt' % other_lib_name, textwrap.dedent("""\
-        %s<=%s
-        """ % (other_lib_name, other_lib_version)))
-    result = run_pip('install', '-r', env.scratch_path / 'initools-req.txt')
-    assert result.files_created[env.site_packages/other_lib_name].dir
-    fn = '%s-%s-py%s.egg-info' % (other_lib_name, other_lib_version, pyversion)
-    assert result.files_created[env.site_packages/fn].dir
-    assert env.venv/'src'/'initools' in result.files_created
-
-
-def test_respect_order_in_requirements_file():
-    env = reset_env()
-    write_file('frameworks-req.txt', textwrap.dedent("""\
-        bidict
-        ordereddict
-        initools
-        """))
-    result = run_pip('install', '-r', env.scratch_path / 'frameworks-req.txt')
-    downloaded = [line for line in result.stdout.split('\n')
-                  if 'Downloading/unpacking' in line]
-
-    assert 'bidict' in downloaded[0], 'First download should ' \
-            'be "bidict" but was "%s"' % downloaded[0]
-    assert 'ordereddict' in downloaded[1], 'Second download should ' \
-            'be "ordereddict" but was "%s"' % downloaded[1]
-    assert 'initools' in downloaded[2], 'Third download should ' \
-            'be "initools" but was "%s"' % downloaded[2]
-
-
-def test_requirements_data_structure_keeps_order():
-    requirements = Requirements()
-    requirements['pip'] = 'pip'
-    requirements['nose'] = 'nose'
-    requirements['coverage'] = 'coverage'
-
-    assert ['pip', 'nose', 'coverage'] == list(requirements.values())
-    assert ['pip', 'nose', 'coverage'] == list(requirements.keys())
-
-
-def test_requirements_data_structure_implements__repr__():
-    requirements = Requirements()
-    requirements['pip'] = 'pip'
-    requirements['nose'] = 'nose'
-
-    assert "Requirements({'pip': 'pip', 'nose': 'nose'})" == repr(requirements)
-
-
-def test_requirements_data_structure_implements__contains__():
-    requirements = Requirements()
-    requirements['pip'] = 'pip'
-
-    assert 'pip' in requirements
-    assert 'nose' not in requirements
diff --git a/vendor/pip-1.2.1/tests/test_search.py b/vendor/pip-1.2.1/tests/test_search.py
deleted file mode 100644
index 53aad5349edd39234ecb46e8a000b3284c4fe2d3..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_search.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import pip.download
-from pip.commands.search import (compare_versions,
-                                 highest_version,
-                                 transform_hits,
-                                 SearchCommand)
-from pip.status_codes import NO_MATCHES_FOUND, SUCCESS
-from pip.backwardcompat import xmlrpclib, b
-from mock import Mock
-from tests.test_pip import run_pip, reset_env, pyversion
-from tests.pypi_server import assert_equal
-
-
-if pyversion >= '3':
-    VERBOSE_FALSE = False
-else:
-    VERBOSE_FALSE = 0
-
-
-def test_version_compare():
-    """
-    Test version comparison.
-
-    """
-    assert compare_versions('1.0', '1.1') == -1
-    assert compare_versions('1.1', '1.0') == 1
-    assert compare_versions('1.1a1', '1.1') == -1
-    assert compare_versions('1.1.1', '1.1a') == -1
-    assert highest_version(['1.0', '2.0', '0.1']) == '2.0'
-    assert highest_version(['1.0a1', '1.0']) == '1.0'
-
-
-def test_pypi_xml_transformation():
-    """
-    Test transformation of data structures (pypi xmlrpc to custom list).
-
-    """
-    pypi_hits = [{'_pypi_ordering': 100, 'name': 'foo', 'summary': 'foo summary', 'version': '1.0'},
-            {'_pypi_ordering': 200, 'name': 'foo', 'summary': 'foo summary v2', 'version': '2.0'},
-            {'_pypi_ordering': 50, 'name': 'bar', 'summary': 'bar summary', 'version': '1.0'}]
-    expected = [{'score': 200, 'versions': ['1.0', '2.0'], 'name': 'foo', 'summary': 'foo summary v2'},
-            {'score': 50, 'versions': ['1.0'], 'name': 'bar', 'summary': 'bar summary'}]
-    assert_equal(expected, transform_hits(pypi_hits))
-
-
-def test_search():
-    """
-    End to end test of search command.
-
-    """
-    reset_env()
-    output = run_pip('search', 'pip')
-    assert 'pip installs packages' in output.stdout
-
-
-def test_multiple_search():
-    """
-    Test searching for multiple packages at once.
-
-    """
-    reset_env()
-    output = run_pip('search', 'pip', 'INITools')
-    assert 'pip installs packages' in output.stdout
-    assert 'Tools for parsing and using INI-style files' in output.stdout
-
-
-def test_searching_through_Search_class():
-    """
-    Verify that ``SearchCommand.search`` uses the test's fake xmlrpclib transport
-    """
-    original_xmlrpclib_transport = pip.download.xmlrpclib_transport
-    pip.download.xmlrpclib_transport = fake_transport = Mock()
-    query = 'mylittlequerythatdoesnotexists'
-    dumped_xmlrpc_request = b(xmlrpclib.dumps(({'name': query, 'summary': query}, 'or'), 'search'))
-    expected = [{'_pypi_ordering': 100, 'name': 'foo', 'summary': 'foo summary', 'version': '1.0'}]
-    fake_transport.request.return_value = (expected,)
-    pypi_searcher = SearchCommand()
-    result = pypi_searcher.search(query, 'http://pypi.python.org/pypi')
-    try:
-        assert expected == result, result
-        fake_transport.request.assert_called_with('pypi.python.org', '/pypi', dumped_xmlrpc_request, verbose=VERBOSE_FALSE)
-    finally:
-        pip.download.xmlrpclib_transport = original_xmlrpclib_transport
-
-
-def test_search_missing_argument():
-    """
-    Test missing required argument for search
-    """
-    env = reset_env(use_distribute=True)
-    result = run_pip('search', expect_error=True)
-    assert 'ERROR: Missing required argument (search query).' in result.stdout
-
-
-def test_run_method_should_return_success_when_find_packages():
-    """
-    Test SearchCommand.run for found package
-    """
-    options_mock = Mock()
-    options_mock.index = 'http://pypi.python.org/pypi'
-    search_cmd = SearchCommand()
-    status = search_cmd.run(options_mock, ('pip',))
-    assert status == SUCCESS
-
-
-def test_run_method_should_return_no_matches_found_when_does_not_find_packages():
-    """
-    Test SearchCommand.run for no matches
-    """
-    options_mock = Mock()
-    options_mock.index = 'http://pypi.python.org/pypi'
-    search_cmd = SearchCommand()
-    status = search_cmd.run(options_mock, ('non-existent-package',))
-    assert status == NO_MATCHES_FOUND, status
-
-
-def test_search_should_exit_status_code_zero_when_find_packages():
-    """
-    Test search exit status code for package found
-    """
-    env = reset_env(use_distribute=True)
-    result = run_pip('search', 'pip')
-    assert result.returncode == SUCCESS
-
-
-def test_search_exit_status_code_when_finds_no_package():
-    """
-    Test search exit status code for no matches
-    """
-    env = reset_env(use_distribute=True)
-    result = run_pip('search', 'non-existent-package', expect_error=True)
-    assert result.returncode == NO_MATCHES_FOUND
diff --git a/vendor/pip-1.2.1/tests/test_unicode.py b/vendor/pip-1.2.1/tests/test_unicode.py
deleted file mode 100644
index d9196e7505b0b0d30122cf0f3000495231d1c3aa..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_unicode.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os
-from tests.test_pip import here, reset_env, run_pip
-
-
-def test_install_package_that_emits_unicode():
-    """
-    Install a package with a setup.py that emits UTF-8 output and then fails.
-    This works fine in Python 2, but fails in Python 3 with:
-
-    Traceback (most recent call last):
-      ...
-      File "/Users/marc/python/virtualenvs/py3.1-phpserialize/lib/python3.2/site-packages/pip-1.0.2-py3.2.egg/pip/__init__.py", line 230, in call_subprocess
-        line = console_to_str(stdout.readline())
-      File "/Users/marc/python/virtualenvs/py3.1-phpserialize/lib/python3.2/site-packages/pip-1.0.2-py3.2.egg/pip/backwardcompat.py", line 60, in console_to_str
-        return s.decode(console_encoding)
-    UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 17: ordinal not in range(128)
-
-    Refs https://github.com/pypa/pip/issues/326
-    """
-
-    env = reset_env()
-    to_install = os.path.abspath(os.path.join(here, 'packages', 'BrokenEmitsUTF8'))
-    result = run_pip('install', to_install, expect_error=True)
-    assert '__main__.FakeError: this package designed to fail on install' in result.stdout
-    assert 'UnicodeDecodeError' not in result.stdout
diff --git a/vendor/pip-1.2.1/tests/test_uninstall.py b/vendor/pip-1.2.1/tests/test_uninstall.py
deleted file mode 100644
index c88c7a681f09a47e5bb30d2b889ccae4a62c4675..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_uninstall.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import textwrap
-import sys
-from os.path import join
-from tempfile import mkdtemp
-from tests.test_pip import reset_env, run_pip, assert_all_changes, write_file
-from tests.local_repos import local_repo, local_checkout
-
-from pip.util import rmtree
-
-
-def test_simple_uninstall():
-    """
-    Test simple install and uninstall.
-
-    """
-    env = reset_env()
-    result = run_pip('install', 'INITools==0.2', expect_error=True)
-    assert join(env.site_packages, 'initools') in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('uninstall', 'INITools', '-y', expect_error=True)
-    assert_all_changes(result, result2, [env.venv/'build', 'cache'])
-
-
-def test_uninstall_with_scripts():
-    """
-    Uninstall an easy_installed package with scripts.
-
-    """
-    env = reset_env()
-    result = env.run('easy_install', 'PyLogo', expect_stderr=True)
-    easy_install_pth = env.site_packages/ 'easy-install.pth'
-    pylogo = sys.platform == 'win32' and 'pylogo' or 'PyLogo'
-    assert pylogo in result.files_updated[easy_install_pth].bytes
-    result2 = run_pip('uninstall', 'pylogo', '-y', expect_error=True)
-    assert_all_changes(result, result2, [env.venv/'build', 'cache'])
-
-
-def test_uninstall_namespace_package():
-    """
-    Uninstall a distribution with a namespace package without clobbering
-    the namespace and everything in it.
-
-    """
-    env = reset_env()
-    result = run_pip('install', 'pd.requires==0.0.3', expect_error=True)
-    assert join(env.site_packages, 'pd') in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('uninstall', 'pd.find', '-y', expect_error=True)
-    assert join(env.site_packages, 'pd') not in result2.files_deleted, sorted(result2.files_deleted.keys())
-    assert join(env.site_packages, 'pd', 'find') in result2.files_deleted, sorted(result2.files_deleted.keys())
-
-
-def test_uninstall_console_scripts():
-    """
-    Test uninstalling a package with additional files (console_script entry points, extra directories).
-
-    """
-    env = reset_env()
-    args = ['install']
-    args.append('discover')
-    result = run_pip(*args, **{"expect_error": True})
-    assert env.bin/'discover'+env.exe in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('uninstall', 'discover', '-y', expect_error=True)
-    assert_all_changes(result, result2, [env.venv/'build', 'cache'])
-
-
-def test_uninstall_easy_installed_console_scripts():
-    """
-    Test uninstalling a package with console_scripts that was installed via easy_install.
-
-    """
-    env = reset_env()
-    args = ['easy_install']
-    args.append('discover')
-    result = env.run(*args, **{"expect_stderr": True})
-    assert env.bin/'discover'+env.exe in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('uninstall', 'discover', '-y')
-    assert_all_changes(result, result2, [env.venv/'build', 'cache'])
-
-
-def test_uninstall_editable_from_svn():
-    """
-    Test uninstalling an editable installation from svn.
-
-    """
-    env = reset_env()
-    result = run_pip('install', '-e', '%s#egg=initools-dev' %
-                     local_checkout('svn+http://svn.colorstudy.com/INITools/trunk'))
-    result.assert_installed('INITools')
-    result2 = run_pip('uninstall', '-y', 'initools')
-    assert (env.venv/'src'/'initools' in result2.files_after), 'oh noes, pip deleted my sources!'
-    assert_all_changes(result, result2, [env.venv/'src', env.venv/'build'])
-
-
-def test_uninstall_editable_with_source_outside_venv():
-    """
-    Test uninstalling an editable install whose source lies outside the venv.
-
-    """
-    temp = mkdtemp()
-    try:
-        tmpdir = join(temp, 'virtualenv')
-        _test_uninstall_editable_with_source_outside_venv(tmpdir)
-    finally:
-        rmtree(temp)
-
-
-def _test_uninstall_editable_with_source_outside_venv(tmpdir):
-    env = reset_env()
-    result = env.run('git', 'clone', local_repo('git+git://github.com/pypa/virtualenv'), tmpdir)
-    result2 = run_pip('install', '-e', tmpdir)
-    assert (join(env.site_packages, 'virtualenv.egg-link') in result2.files_created), list(result2.files_created.keys())
-    result3 = run_pip('uninstall', '-y', 'virtualenv', expect_error=True)
-    assert_all_changes(result, result3, [env.venv/'build'])
-
-
-def test_uninstall_from_reqs_file():
-    """
-    Test uninstall from a requirements file.
-
-    """
-    env = reset_env()
-    write_file('test-req.txt', textwrap.dedent("""\
-        -e %s#egg=initools-dev
-        # and something else to test out:
-        PyLogo<0.4
-        """ % local_checkout('svn+http://svn.colorstudy.com/INITools/trunk')))
-    result = run_pip('install', '-r', 'test-req.txt')
-    write_file('test-req.txt', textwrap.dedent("""\
-        # -f, -i, and --extra-index-url should all be ignored by uninstall
-        -f http://www.example.com
-        -i http://www.example.com
-        --extra-index-url http://www.example.com
-
-        -e %s#egg=initools-dev
-        # and something else to test out:
-        PyLogo<0.4
-        """ % local_checkout('svn+http://svn.colorstudy.com/INITools/trunk')))
-    result2 = run_pip('uninstall', '-r', 'test-req.txt', '-y')
-    assert_all_changes(
-        result, result2, [env.venv/'build', env.venv/'src', env.scratch/'test-req.txt'])
diff --git a/vendor/pip-1.2.1/tests/test_upgrade.py b/vendor/pip-1.2.1/tests/test_upgrade.py
deleted file mode 100644
index c6b8d686a58f28e1bb3d36e863d2cb4d3c729f89..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_upgrade.py
+++ /dev/null
@@ -1,192 +0,0 @@
-import textwrap
-from os.path import join
-from tests.test_pip import (here, reset_env, run_pip, assert_all_changes,
-                            write_file, pyversion, _create_test_package,
-                            _change_test_package_version)
-
-
-def test_no_upgrade_unless_requested():
-    """
-    No upgrade if not specifically requested.
-
-    """
-    reset_env()
-    run_pip('install', 'INITools==0.1', expect_error=True)
-    result = run_pip('install', 'INITools', expect_error=True)
-    assert not result.files_created, 'pip install INITools upgraded when it should not have'
-
-
-def test_upgrade_to_specific_version():
-    """
-    It does upgrade to the specific version requested.
-
-    """
-    env = reset_env()
-    run_pip('install', 'INITools==0.1', expect_error=True)
-    result = run_pip('install', 'INITools==0.2', expect_error=True)
-    assert result.files_created, 'pip install with specific version did not upgrade'
-    assert env.site_packages/'INITools-0.1-py%s.egg-info' % pyversion in result.files_deleted
-    assert env.site_packages/'INITools-0.2-py%s.egg-info' % pyversion in result.files_created
-
-
-def test_upgrade_if_requested():
-    """
-    And it does upgrade if requested.
-
-    """
-    env = reset_env()
-    run_pip('install', 'INITools==0.1', expect_error=True)
-    result = run_pip('install', '--upgrade', 'INITools', expect_error=True)
-    assert result.files_created, 'pip install --upgrade did not upgrade'
-    assert env.site_packages/'INITools-0.1-py%s.egg-info' % pyversion not in result.files_created
-
-
-def test_upgrade_with_newest_already_installed():
-    """
-    If the newest version of a package is already installed, the package should
-    not be reinstalled and the user should be informed.
-    """
-
-    env = reset_env()
-    run_pip('install', 'INITools')
-    result = run_pip('install', '--upgrade', 'INITools')
-    assert not result.files_created, 'pip install --upgrade INITools upgraded when it should not have'
-    assert 'already up-to-date' in result.stdout
-
-
-def test_upgrade_force_reinstall_newest():
-    """
-    Force reinstallation of a package, even if it is already at its newest
-    version, when --force-reinstall is supplied.
-    """
-
-    env = reset_env()
-    result = run_pip('install', 'INITools')
-    assert env.site_packages/ 'initools' in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('install', '--upgrade', '--force-reinstall', 'INITools')
-    assert result2.files_updated, 'force reinstall of INITools did not update any files'
-    result3 = run_pip('uninstall', 'initools', '-y', expect_error=True)
-    assert_all_changes(result, result3, [env.venv/'build', 'cache'])
-
-
-def test_uninstall_before_upgrade():
-    """
-    Automatic uninstall-before-upgrade.
-
-    """
-    env = reset_env()
-    result = run_pip('install', 'INITools==0.2', expect_error=True)
-    assert env.site_packages/ 'initools' in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('install', 'INITools==0.3', expect_error=True)
-    assert result2.files_created, 'upgrade to INITools 0.3 failed'
-    result3 = run_pip('uninstall', 'initools', '-y', expect_error=True)
-    assert_all_changes(result, result3, [env.venv/'build', 'cache'])
-
-
-def test_uninstall_before_upgrade_from_url():
-    """
-    Automatic uninstall-before-upgrade from URL.
-
-    """
-    env = reset_env()
-    result = run_pip('install', 'INITools==0.2', expect_error=True)
-    assert env.site_packages/ 'initools' in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('install', 'http://pypi.python.org/packages/source/I/INITools/INITools-0.3.tar.gz', expect_error=True)
-    assert result2.files_created, 'upgrade to INITools 0.3 failed'
-    result3 = run_pip('uninstall', 'initools', '-y', expect_error=True)
-    assert_all_changes(result, result3, [env.venv/'build', 'cache'])
-
-
-def test_upgrade_to_same_version_from_url():
-    """
-    When installing from a URL the same version that is already installed,
-    there is no need to uninstall and reinstall if --upgrade is not specified.
-
-    """
-    env = reset_env()
-    result = run_pip('install', 'INITools==0.3', expect_error=True)
-    assert env.site_packages/ 'initools' in result.files_created, sorted(result.files_created.keys())
-    result2 = run_pip('install', 'http://pypi.python.org/packages/source/I/INITools/INITools-0.3.tar.gz', expect_error=True)
-    assert not result2.files_updated, 'INITools 0.3 reinstalled same version'
-    result3 = run_pip('uninstall', 'initools', '-y', expect_error=True)
-    assert_all_changes(result, result3, [env.venv/'build', 'cache'])
-
-
-def test_upgrade_from_reqs_file():
-    """
-    Upgrade from a requirements file.
-
-    """
-    env = reset_env()
-    write_file('test-req.txt', textwrap.dedent("""\
-        PyLogo<0.4
-        # and something else to test out:
-        INITools==0.3
-        """))
-    install_result = run_pip('install', '-r', env.scratch_path/ 'test-req.txt')
-    write_file('test-req.txt', textwrap.dedent("""\
-        PyLogo
-        # and something else to test out:
-        INITools
-        """))
-    run_pip('install', '--upgrade', '-r', env.scratch_path/ 'test-req.txt')
-    uninstall_result = run_pip('uninstall', '-r', env.scratch_path/ 'test-req.txt', '-y')
-    assert_all_changes(install_result, uninstall_result, [env.venv/'build', 'cache', env.scratch/'test-req.txt'])
-
-
-def test_uninstall_rollback():
-    """
-    Test uninstall-rollback (using a test package with a setup.py
-    crafted to fail on install).
-
-    """
-    env = reset_env()
-    find_links = 'file://' + join(here, 'packages')
-    result = run_pip('install', '-f', find_links, '--no-index', 'broken==0.1')
-    assert env.site_packages / 'broken.py' in result.files_created, list(result.files_created.keys())
-    result2 = run_pip('install', '-f', find_links, '--no-index', 'broken==0.2broken', expect_error=True)
-    assert result2.returncode == 1, str(result2)
-    assert env.run('python', '-c', "import broken; print(broken.VERSION)").stdout == '0.1\n'
-    assert_all_changes(result.files_after, result2, [env.venv/'build', 'pip-log.txt'])
-
-
-def test_editable_git_upgrade():
-    """
-    Test installing an editable git package from a repository, upgrading the
-    repository, installing again, and checking that it gets the newer version.
-    """
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    run_pip('install', '-e', '%s#egg=version_pkg' % ('git+file://' + version_pkg_path))
-    version = env.run('version_pkg')
-    assert '0.1' in version.stdout
-    _change_test_package_version(env, version_pkg_path)
-    run_pip('install', '-e', '%s#egg=version_pkg' % ('git+file://' + version_pkg_path))
-    version2 = env.run('version_pkg')
-    assert 'some different version' in version2.stdout
-
-
-def test_should_not_install_always_from_cache():
-    """
-    If a different version is already cached, pip should install the version
-    actually requested rather than reusing the cached download.
-    Related to issue #175.
-    """
-    env = reset_env()
-    run_pip('install', 'INITools==0.2', expect_error=True)
-    run_pip('uninstall', '-y', 'INITools')
-    result = run_pip('install', 'INITools==0.1', expect_error=True)
-    assert env.site_packages/'INITools-0.2-py%s.egg-info' % pyversion not in result.files_created
-    assert env.site_packages/'INITools-0.1-py%s.egg-info' % pyversion in result.files_created
-
-
-def test_install_with_ignoreinstalled_requested():
-    """
-    It installs the package if --ignore-installed (-I) is set.
-
-    """
-    env = reset_env()
-    run_pip('install', 'INITools==0.1', expect_error=True)
-    result = run_pip('install', '-I', 'INITools', expect_error=True)
-    assert result.files_created, 'pip install -I did not install'
-    assert env.site_packages/'INITools-0.1-py%s.egg-info' % pyversion not in result.files_created
-
diff --git a/vendor/pip-1.2.1/tests/test_vcs_backends.py b/vendor/pip-1.2.1/tests/test_vcs_backends.py
deleted file mode 100644
index 9561254fa62d7491d7a50aab548ca8baf1d0fff1..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_vcs_backends.py
+++ /dev/null
@@ -1,131 +0,0 @@
-from tests.test_pip import (reset_env, run_pip,
-                      _create_test_package, _change_test_package_version)
-from tests.local_repos import local_checkout
-
-
-def test_install_editable_from_git_with_https():
-    """
-    Test cloning from Git with https.
-    """
-    reset_env()
-    result = run_pip('install', '-e',
-                     '%s#egg=pip-test-package' %
-                     local_checkout('git+https://github.com/pypa/pip-test-package.git'),
-                     expect_error=True)
-    result.assert_installed('pip-test-package', with_files=['.git'])
-
-
-def test_git_with_sha1_revisions():
-    """
-    Git backend should be able to install from SHA1 revisions
-    """
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    _change_test_package_version(env, version_pkg_path)
-    sha1 = env.run('git', 'rev-parse', 'HEAD~1', cwd=version_pkg_path).stdout.strip()
-    run_pip('install', '-e', '%s@%s#egg=version_pkg' % ('git+file://' + version_pkg_path.abspath.replace('\\', '/'), sha1))
-    version = env.run('version_pkg')
-    assert '0.1' in version.stdout, version.stdout
-
-
-def test_git_with_branch_name_as_revision():
-    """
-    Git backend should be able to install from branch names
-    """
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    env.run('git', 'checkout', '-b', 'test_branch', expect_stderr=True, cwd=version_pkg_path)
-    _change_test_package_version(env, version_pkg_path)
-    run_pip('install', '-e', '%s@test_branch#egg=version_pkg' % ('git+file://' + version_pkg_path.abspath.replace('\\', '/')))
-    version = env.run('version_pkg')
-    assert 'some different version' in version.stdout
-
-
-def test_git_with_tag_name_as_revision():
-    """
-    Git backend should be able to install from tag names
-    """
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    env.run('git', 'tag', 'test_tag', expect_stderr=True, cwd=version_pkg_path)
-    _change_test_package_version(env, version_pkg_path)
-    run_pip('install', '-e', '%s@test_tag#egg=version_pkg' % ('git+file://' + version_pkg_path.abspath.replace('\\', '/')))
-    version = env.run('version_pkg')
-    assert '0.1' in version.stdout
-
-
-def test_git_with_tag_name_and_update():
-    """
-    Test cloning a git repository and updating to a different version.
-    """
-    reset_env()
-    result = run_pip('install', '-e', '%s#egg=pip-test-package' %
-                     local_checkout('git+http://github.com/pypa/pip-test-package.git'),
-                     expect_error=True)
-    result.assert_installed('pip-test-package', with_files=['.git'])
-    result = run_pip('install', '--global-option=--version', '-e',
-                     '%s@0.1.1#egg=pip-test-package' %
-                     local_checkout('git+http://github.com/pypa/pip-test-package.git'),
-                     expect_error=True)
-    assert '0.1.1\n' in result.stdout
-
-
-def test_git_branch_should_not_be_changed():
-    """
-    Editable installations should not change the current branch.
-    Related to issues #32 and #161.
-    """
-    env = reset_env()
-    run_pip('install', '-e', '%s#egg=pip-test-package' %
-                local_checkout('git+http://github.com/pypa/pip-test-package.git'),
-                expect_error=True)
-    source_dir = env.venv_path/'src'/'pip-test-package'
-    result = env.run('git', 'branch', cwd=source_dir)
-    assert '* master' in result.stdout, result.stdout
-
-
-def test_git_with_non_editable_unpacking():
-    """
-    Test cloning a git repository from a non-editable URL with a given tag.
-    """
-    reset_env()
-    result = run_pip('install', '--global-option=--version', local_checkout(
-                     'git+http://github.com/pypa/pip-test-package.git@0.1.1#egg=pip-test-package'
-                     ), expect_error=True)
-    assert '0.1.1\n' in result.stdout
-
-
-def test_git_with_editable_where_egg_contains_dev_string():
-    """
-    Test cloning a git repository from an editable URL whose egg name contains the string "dev".
-    """
-    reset_env()
-    result = run_pip('install', '-e', '%s#egg=django-devserver' %
-                     local_checkout('git+git://github.com/dcramer/django-devserver.git'))
-    result.assert_installed('django-devserver', with_files=['.git'])
-
-
-def test_git_with_non_editable_where_egg_contains_dev_string():
-    """
-    Test cloning a git repository from a non-editable URL whose egg name contains the string "dev".
-    """
-    env = reset_env()
-    result = run_pip('install', '%s#egg=django-devserver' %
-                     local_checkout('git+git://github.com/dcramer/django-devserver.git'))
-    devserver_folder = env.site_packages/'devserver'
-    assert devserver_folder in result.files_created, str(result)
-
-
-def test_git_with_ambiguous_revs():
-    """
-    Test git with two "names" (tag/branch) pointing to the same commit
-    """
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    package_url = 'git+file://%s@0.1#egg=version_pkg' % (version_pkg_path.abspath.replace('\\', '/'))
-    env.run('git', 'tag', '0.1', cwd=version_pkg_path)
-    result = run_pip('install', '-e', package_url)
-    assert 'Could not find a tag or branch' not in result.stdout
-    # it is 'version-pkg' instead of 'version_pkg' because the egg-link file
-    # is named version-pkg.egg-link, since the project is a single .py module
-    result.assert_installed('version-pkg', with_files=['.git'])
diff --git a/vendor/pip-1.2.1/tests/test_vcs_bazaar.py b/vendor/pip-1.2.1/tests/test_vcs_bazaar.py
deleted file mode 100644
index 4e43fe5f9998599367b8e16d4cdbdd6c5199a421..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_vcs_bazaar.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from tests.test_pip import pyversion
-from pip.vcs.bazaar import Bazaar
-
-if pyversion >= '3':
-    VERBOSE_FALSE = False
-else:
-    VERBOSE_FALSE = 0
-
-
-def test_bazaar_simple_urls():
-    """
-    Test bzr url support.
-
-    SSH and launchpad have special handling.
-    """
-    http_bzr_repo = Bazaar(url='bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject')
-    https_bzr_repo = Bazaar(url='bzr+https://bzr.myproject.org/MyProject/trunk/#egg=MyProject')
-    ssh_bzr_repo = Bazaar(url='bzr+ssh://bzr.myproject.org/MyProject/trunk/#egg=MyProject')
-    ftp_bzr_repo = Bazaar(url='bzr+ftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject')
-    sftp_bzr_repo = Bazaar(url='bzr+sftp://bzr.myproject.org/MyProject/trunk/#egg=MyProject')
-    launchpad_bzr_repo = Bazaar(url='bzr+lp:MyLaunchpadProject#egg=MyLaunchpadProject')
-
-    assert http_bzr_repo.get_url_rev() == ('http://bzr.myproject.org/MyProject/trunk/', None)
-    assert https_bzr_repo.get_url_rev() == ('https://bzr.myproject.org/MyProject/trunk/', None)
-    assert ssh_bzr_repo.get_url_rev() == ('bzr+ssh://bzr.myproject.org/MyProject/trunk/', None)
-    assert ftp_bzr_repo.get_url_rev() == ('ftp://bzr.myproject.org/MyProject/trunk/', None)
-    assert sftp_bzr_repo.get_url_rev() == ('sftp://bzr.myproject.org/MyProject/trunk/', None)
-    assert launchpad_bzr_repo.get_url_rev() == ('lp:MyLaunchpadProject', None)
-
diff --git a/vendor/pip-1.2.1/tests/test_vcs_git.py b/vendor/pip-1.2.1/tests/test_vcs_git.py
deleted file mode 100644
index 0b3abab2e1331de3de7e2d51ade45042e2b6d794..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_vcs_git.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from mock import patch
-from pip.vcs.git import Git
-from tests.test_pip import (reset_env, run_pip,
-                            _create_test_package)
-
-
-def test_get_tag_revs_should_return_tag_name_and_commit_pair():
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    env.run('git', 'tag', '0.1', cwd=version_pkg_path)
-    env.run('git', 'tag', '0.2', cwd=version_pkg_path)
-    commit = env.run('git', 'rev-parse', 'HEAD',
-                     cwd=version_pkg_path).stdout.strip()
-    git = Git()
-    result = git.get_tag_revs(version_pkg_path)
-    assert result == {'0.1': commit, '0.2': commit}, result
-
-
-def test_get_branch_revs_should_return_branch_name_and_commit_pair():
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    env.run('git', 'branch', 'branch0.1', cwd=version_pkg_path)
-    commit = env.run('git', 'rev-parse', 'HEAD',
-                     cwd=version_pkg_path).stdout.strip()
-    git = Git()
-    result = git.get_branch_revs(version_pkg_path)
-    assert result == {'master': commit, 'branch0.1': commit}
-
-
-def test_get_branch_revs_should_ignore_no_branch():
-    env = reset_env()
-    version_pkg_path = _create_test_package(env)
-    env.run('git', 'branch', 'branch0.1', cwd=version_pkg_path)
-    commit = env.run('git', 'rev-parse', 'HEAD',
-                     cwd=version_pkg_path).stdout.strip()
-    # current branch here is "* (nobranch)"
-    env.run('git', 'checkout', commit,
-            cwd=version_pkg_path, expect_stderr=True)
-    git = Git()
-    result = git.get_branch_revs(version_pkg_path)
-    assert result == {'master': commit, 'branch0.1': commit}
-
-
-@patch('pip.vcs.git.Git.get_tag_revs')
-@patch('pip.vcs.git.Git.get_branch_revs')
-def test_check_rev_options_should_handle_branch_name(branches_revs_mock,
-                                                     tags_revs_mock):
-    branches_revs_mock.return_value = {'master': '123456'}
-    tags_revs_mock.return_value = {'0.1': '123456'}
-    git = Git()
-
-    result = git.check_rev_options('master', '.', [])
-    assert result == ['123456']
-
-
-@patch('pip.vcs.git.Git.get_tag_revs')
-@patch('pip.vcs.git.Git.get_branch_revs')
-def test_check_rev_options_should_handle_tag_name(branches_revs_mock,
-                                                  tags_revs_mock):
-    branches_revs_mock.return_value = {'master': '123456'}
-    tags_revs_mock.return_value = {'0.1': '123456'}
-    git = Git()
-
-    result = git.check_rev_options('0.1', '.', [])
-    assert result == ['123456']
-
-
-@patch('pip.vcs.git.Git.get_tag_revs')
-@patch('pip.vcs.git.Git.get_branch_revs')
-def test_check_rev_options_should_handle_ambiguous_commit(branches_revs_mock,
-                                                          tags_revs_mock):
-    branches_revs_mock.return_value = {'master': '123456'}
-    tags_revs_mock.return_value = {'0.1': '123456'}
-    git = Git()
-
-    result = git.check_rev_options('0.1', '.', [])
-    assert result == ['123456'], result
diff --git a/vendor/pip-1.2.1/tests/test_vcs_subversion.py b/vendor/pip-1.2.1/tests/test_vcs_subversion.py
deleted file mode 100644
index 2122201135ec5c34bbc6829cea7787578d6aa4d8..0000000000000000000000000000000000000000
--- a/vendor/pip-1.2.1/tests/test_vcs_subversion.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from mock import patch
-from pip.vcs.subversion import Subversion
-from tests.test_pip import reset_env
-
-@patch('pip.vcs.subversion.call_subprocess')
-def test_obtain_should_recognize_auth_info_in_url(call_subprocess_mock):
-    env = reset_env()
-    svn = Subversion(url='svn+http://username:password@svn.example.com/')
-    svn.obtain(env.scratch_path/'test')
-    call_subprocess_mock.assert_called_with([
-        svn.cmd, 'checkout', '-q', '--username', 'username', '--password', 'password',
-        'http://username:password@svn.example.com/', env.scratch_path/'test'])
-
-@patch('pip.vcs.subversion.call_subprocess')
-def test_export_should_recognize_auth_info_in_url(call_subprocess_mock):
-    env = reset_env()
-    svn = Subversion(url='svn+http://username:password@svn.example.com/')
-    svn.export(env.scratch_path/'test')
-    assert call_subprocess_mock.call_args[0] == ([
-        svn.cmd, 'export', '--username', 'username', '--password', 'password',
-        'http://username:password@svn.example.com/', env.scratch_path/'test'],)