diff --git a/vendor/distribute-0.6.35/CHANGES.txt b/vendor/distribute-0.6.35/CHANGES.txt
new file mode 100644
index 0000000000000000000000000000000000000000..cae946e0dc7f610a68cf437b6011a91403570b99
--- /dev/null
+++ b/vendor/distribute-0.6.35/CHANGES.txt
@@ -0,0 +1,498 @@
+=======
+CHANGES
+=======
+
+------
+0.6.35
+------
+
+Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in
+how it parses version numbers.
+
+* Issue #278: Restored compatibility with distribute 0.6.22 and setuptools
+  0.6. Updated the documentation to match more closely with the version
+  parsing as intended in setuptools 0.6.
+
+------
+0.6.34
+------
+
+* Issue #341: 0.6.33 fails to build under Python 2.4.
+
+------
+0.6.33
+------
+
+* Fix 2 errors with Jython 2.5.
+* Fix 1 failure with Jython 2.5 and 2.7.
+* Disable workaround for Jython scripts on Linux systems.
+* Issue #336: `setup.py` no longer masks failure exit code when tests fail.
+* Fix issue in pkg_resources where try/except around a platform-dependent
+  import would trigger hook load failures on Mercurial. See pull request 32
+  for details.
+* Issue #341: Fix a ResourceWarning.
+
+------
+0.6.32
+------
+
+* Fix test suite with Python 2.6.
+* Fix some DeprecationWarnings and ResourceWarnings.
+* Issue #335: Backed out `setup_requires` superseding installed requirements
+  until the regression can be addressed.
+
+------
+0.6.31
+------
+
+* Issue #303: Make sure the manifest only ever contains UTF-8 in Python 3.
+* Issue #329: Properly close files created by tests for compatibility with
+  Jython.
+* Work around Jython bugs `#1980 <http://bugs.jython.org/issue1980>`_ and
+  `#1981 <http://bugs.jython.org/issue1981>`_.
+* Issue #334: Provide workaround for packages that reference `sys.__stdout__`
+  such as numpy does. This change should address
+  `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
+  as the system encoding is UTF-8 or the IO encoding is specified in the
+  environment, i.e.::
+
+     PYTHONIOENCODING=utf8 pip install numpy
+
+* Fix for encoding issue when installing from Windows executable on Python 3.
+* Issue #323: Allow `setup_requires` requirements to supersede installed
+  requirements. Added some new keyword arguments to existing pkg_resources
+  methods. Also had to update how __path__ is handled for namespace packages
+  to ensure that when a new egg distribution containing a namespace package is
+  placed on sys.path, the entries in __path__ are found in the same order they
+  would have been in had that egg been on the path when pkg_resources was
+  first imported.
+
+------
+0.6.30
+------
+
+* Issue #328: Clean up temporary directories in distribute_setup.py.
+* Fix fatal bug in distribute_setup.py.
+
+------
+0.6.29
+------
+
+* Pull Request #14: Honor file permissions in zip files.
+* Issue #327: Merged pull request #24 to fix a dependency problem with pip.
+* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
+* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
+  to produce uploadable documentation.
+* Issue #326: `upload_docs` provided mangled auth credentials under Python 3.
+* Issue #320: Fix check for "createable" in distribute_setup.py.
+* Issue #305: Remove a warning that was triggered during normal operations.
+* Issue #311: Print metadata in UTF-8 independent of platform.
+* Issue #303: Read manifest file with UTF-8 encoding under Python 3.
+* Issue #301: Allow running tests of namespace packages when using 2to3.
+* Issue #304: Prevent import loop in site.py under Python 3.3.
+* Issue #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+* Issue #299: The develop command didn't work on Python 3, when using 2to3,
+  as the egg link would go to the Python 2 source. Linking to the 2to3'd code
+  in build/lib makes it work, although you will have to rebuild the module
+  before testing it.
+* Issue #306: Even if 2to3 is used, we build in-place under Python 2.
+* Issue #307: Prints the full path when .svn/entries is broken.
+* Issue #313: Support for sdist subcommands (Python 2.7)
+* Issue #314: test_local_index() would fail on OS X.
+* Issue #310: Non-ASCII characters in a namespace __init__.py cause errors.
+* Issue #218: Improved documentation on behavior of `package_data` and
+  `include_package_data`. Files indicated by `package_data` are now included
+  in the manifest.
+* `distribute_setup.py` now allows a `--download-base` argument for retrieving
+  distribute from a specified location.
+
+------
+0.6.28
+------
+
+* Issue #294: setup.py can now be invoked from any directory.
+* Scripts are now installed honoring the umask.
+* Added support for .dist-info directories.
+* Issue #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+  Python 3.3.
+
+------
+0.6.27
+------
+
+* Support current snapshots of CPython 3.3.
+* Distribute now recognizes README.rst as a standard, default readme file.
+* Exclude 'encodings' modules when removing modules from sys.modules.
+  Workaround for #285.
+* Issue #231: Don't fiddle with system python when used with buildout
+  (bootstrap.py)
+
+------
+0.6.26
+------
+
+* Issue #183: Symlinked files are now extracted from source distributions.
+* Issue #227: Easy_install fetch parameters are now passed during the
+  installation of a source distribution; now fulfillment of setup_requires
+  dependencies will honor the parameters passed to easy_install.
+
+------
+0.6.25
+------
+
+* Issue #258: Workaround a cache issue
+* Issue #260: distribute_setup.py now accepts the --user parameter for
+  Python 2.6 and later.
+* Issue #262: package_index.open_with_auth no longer throws LookupError
+  on Python 3.
+* Issue #269: AttributeError when an exception occurs reading Manifest.in
+  on recent releases of Python.
+* Issue #272: Prevent TypeError when namespace package names are unicode
+  and single-install-externally-managed is used. Also fixes PIP issue
+  449.
+* Issue #273: Legacy script launchers now install with Python2/3 support.
+
+------
+0.6.24
+------
+
+* Issue #249: Added options to exclude 2to3 fixers
+
+------
+0.6.23
+------
+
+* Issue #244: Fixed a test
+* Issue #243: Fixed a test
+* Issue #239: Fixed a test
+* Issue #240: Fixed a test
+* Issue #241: Fixed a test
+* Issue #237: Fixed a test
+* Issue #238: easy_install now uses 64bit executable wrappers on 64bit Python
+* Issue #208: Fixed parsed_versions; it now honors post-releases as noted in the documentation
+* Issue #207: Windows cli and gui wrappers pass CTRL-C to child python process
+* Issue #227: easy_install now passes its arguments to setup.py bdist_egg
+* Issue #225: Fixed a NameError on Python 2.5, 2.4
+
+------
+0.6.21
+------
+
+* Issue #225: Fixed a regression on py2.4
+
+------
+0.6.20
+------
+
+* Issue #135: Include url in warning when processing URLs in package_index.
+* Issue #212: Fix issue where easy_install fails on Python 3 with the Windows installer.
+* Issue #213: Fix typo in documentation.
+
+------
+0.6.19
+------
+
+* Issue 206: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
+
+------
+0.6.18
+------
+
+* Issue 210: Fixed a regression introduced by Issue 204 fix.
+
+------
+0.6.17
+------
+
+* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
+  variable to allow disabling installation of the easy_install-${version} script.
+* Support Python >=3.1.4 and >=3.2.1.
+* Issue 204: Don't try to import the parent of a namespace package in
+  declare_namespace
+* Issue 196: Tolerate responses with multiple Content-Length headers
+* Issue 205: Sandboxing doesn't preserve working_set. Leads to setup_requires
+  problems.
+
+------
+0.6.16
+------
+
+* Builds sdist gztar even on Windows (avoiding Issue 193).
+* Issue 192: Fixed metadata omitted on Windows when package_dir
+  specified with forward-slash.
+* Issue 195: Cython build support.
+* Issue 200: Issues with recognizing 64-bit packages on Windows.
+
+------
+0.6.15
+------
+
+* Fixed typo in bdist_egg
+* Several issues under Python 3 have been solved.
+* Issue 146: Fixed missing DLL files after easy_install of windows exe package.
+
+------
+0.6.14
+------
+
+* Issue 170: Fixed unittest failure. Thanks to Toshio.
+* Issue 171: Fixed a race condition in the unittests that caused deadlocks in the test suite.
+* Issue 143: Fixed a lookup issue with easy_install.
+  Thanks to David and Zooko.
+* Issue 174: Fixed the edit mode when it's used with setuptools itself
+
+------
+0.6.13
+------
+
+* Issue 160: 2.7 gives ValueError("Invalid IPv6 URL")
+* Issue 150: Fixed using ~/.local even in a --no-site-packages virtualenv
+* Issue 163: scan index links before external links, and don't use the md5 when
+  comparing two distributions
+
+------
+0.6.12
+------
+
+* Issue 149: Fixed various failures on 2.3/2.4
+
+------
+0.6.11
+------
+
+* Found another case of SandboxViolation - fixed
+* Issue 15 and 48: Introduced a socket timeout of 15 seconds on url openings
+* Added indexsidebar.html into MANIFEST.in
+* Issue 108: Fixed TypeError with Python3.1
+* Issue 121: Fixed --help install command trying to actually install.
+* Issue 112: Added an os.makedirs so that Tarek's solution will work.
+* Issue 133: Added --no-find-links to easy_install
+* Added easy_install --user
+* Issue 100: Fixed develop --user not taking '.' in PYTHONPATH into account
+* Issue 134: removed spurious UserWarnings. Patch by VanLindberg
+* Issue 138: cant_write_to_target error when setup_requires is used.
+* Issue 147: respect the sys.dont_write_bytecode flag
+
+------
+0.6.10
+------
+
+* Reverted change made for the DistributionNotFound exception because
+  zc.buildout uses the exception message to get the name of the
+  distribution.
+
+-----
+0.6.9
+-----
+
+* Issue 90: unknown setuptools version can be added in the working set
+* Issue 87: setup.py doesn't try to convert distribute_setup.py anymore.
+  Initial patch by Arfrever.
+* Issue 89: added a side bar with a download link to the doc.
+* Issue 86: fixed missing sentence in pkg_resources doc.
+* Added a nicer error message when a DistributionNotFound is raised.
+* Issue 80: test_develop now works with Python 3.1
+* Issue 93: upload_docs now works if there is an empty sub-directory.
+* Issue 70: exec bit on non-exec files
+* Issue 99: now the standalone easy_install command doesn't use a
+  "setup.cfg" if any exists in the working directory. It will use it
+  only if triggered by ``install_requires`` from a setup.py call
+  (install, develop, etc).
+* Issue 101: Allowing ``os.devnull`` in Sandbox
+* Issue 92: Fixed the "no eggs" found error with MacPort
+  (platform.mac_ver() fails)
+* Issue 103: test_get_script_header_jython_workaround not run
+  anymore under py3 with C or POSIX locale. Contributed by Arfrever.
+* Issue 104: removed the assertion when the installation fails,
+  with a nicer message for the end user.
+* Issue 100: making sure there's no SandboxViolation when
+  the setup script patches setuptools.
+
+-----
+0.6.8
+-----
+
+* Added "check_packages" in dist. (added in Setuptools 0.6c11)
+* Fixed the DONT_PATCH_SETUPTOOLS state.
+
+-----
+0.6.7
+-----
+
+* Issue 58: Added --user support to the develop command
+* Issue 11: Generated scripts now wrap their call to the script entry point
+  in the standard "if __name__ == '__main__'" block
+* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
+  can drive an installation that doesn't patch a global setuptools.
+* Reviewed unladen-swallow specific change from
+  http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
+  and determined that it no longer applies. Distribute should work fine with
+  Unladen Swallow 2009Q3.
+* Issue 21: Allow PackageIndex.open_url to gracefully handle all cases of a
+  httplib.HTTPException instead of just InvalidURL and BadStatusLine.
+* Removed virtual-python.py from this distribution and updated documentation
+  to point to the actively maintained virtualenv instead.
+* Issue 64: use_setuptools no longer rebuilds the distribute egg every
+  time it is run
+* use_setuptools now properly respects the requested version
+* use_setuptools will no longer try to import a distribute egg for the
+  wrong Python version
+* Issue 74: no_fake should be True by default.
+* Issue 72: avoid a bootstrapping issue with easy_install -U
+
+-----
+0.6.6
+-----
+
+* Unified the bootstrap file so it works on both py2.x and py3k without 2to3
+  (patch by Holger Krekel)
+
+-----
+0.6.5
+-----
+
+* Issue 65: cli.exe and gui.exe are now generated at build time,
+  depending on the platform in use.
+
+* Issue 67: Fixed doc typo (PEP 381/382)
+
+* Distribute no longer shadows setuptools if we require a 0.7-series
+  setuptools.  And an error is raised when installing a 0.7 setuptools with
+  distribute.
+
+* When run from within buildout, no attempt is made to modify an existing
+  setuptools egg, whether in a shared egg directory or a system setuptools.
+
+* Fixed a hole in sandboxing allowing builtin file to write outside of
+  the sandbox.
+
+-----
+0.6.4
+-----
+
+* Added the generation of `distribute_setup_3k.py` during the release.
+  This closes issue #52.
+
+* Added an upload_docs command to easily upload project documentation to
+  PyPI's http://packages.python.org. This closes issue #56.
+
+* Fixed a bootstrap bug on the use_setuptools() API.
+
+-----
+0.6.3
+-----
+
+setuptools
+==========
+
+* Fixed a bunch of calls to file() that caused crashes on Python 3.
+
+bootstrapping
+=============
+
+* Fixed a bug in sorting that caused bootstrap to fail on Python 3.
+
+-----
+0.6.2
+-----
+
+setuptools
+==========
+
+* Added Python 3 support; see docs/python3.txt.
+  This closes http://bugs.python.org/setuptools/issue39.
+
+* Added option to run 2to3 automatically when installing on Python 3.
+  This closes issue #31.
+
+* Fixed invalid usage of requirement.parse, that broke develop -d.
+  This closes http://bugs.python.org/setuptools/issue44.
+
+* Fixed script launcher for 64-bit Windows.
+  This closes http://bugs.python.org/setuptools/issue2.
+
+* KeyError when compiling extensions.
+  This closes http://bugs.python.org/setuptools/issue41.
+
+bootstrapping
+=============
+
+* Fixed bootstrap not working on Windows. This closes issue #49.
+
+* Fixed 2.6 dependencies. This closes issue #50.
+
+* Make sure setuptools is patched when running through easy_install
+  This closes http://bugs.python.org/setuptools/issue40.
+
+-----
+0.6.1
+-----
+
+setuptools
+==========
+
+* package_index.urlopen now catches BadStatusLine and malformed url errors.
+  This closes issue #16 and issue #18.
+
+* zip_ok is now False by default. This closes
+  http://bugs.python.org/setuptools/issue33.
+
+* Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
+
+* Fixed invalid bootstrapping with easy_install installation (issue #40).
+  Thanks to Florian Schulze for the help.
+
+* Removed buildout/bootstrap.py. A new repository will create a specific
+  bootstrap.py script.
+
+
+bootstrapping
+=============
+
+* The bootstrap process leaves setuptools alone if it is detected in the system
+  and --root or --prefix is provided but does not point to the same location.
+  This closes issue #10.
+
+---
+0.6
+---
+
+setuptools
+==========
+
+* Packages required at build time were not fully present at install time.
+  This closes issue #12.
+
+* Protected against failures in tarfile extraction. This closes issue #10.
+
+* Made Jython api_tests.txt doctest compatible. This closes issue #7.
+
+* sandbox.py replaced builtin type file with builtin function open. This
+  closes issue #6.
+
+* Immediately close all file handles. This closes issue #3.
+
+* Added compatibility with Subversion 1.6. This references issue #1.
+
+pkg_resources
+=============
+
+* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
+  instead. Based on a patch from ronaldoussoren. This closes issue #5.
+
+* Fixed a SandboxViolation for mkdir that could occur in certain cases.
+  This closes issue #13.
+
+* Allow find_on_path on systems with tight permissions to fail gracefully.
+  This closes issue #9.
+
+* Corrected inconsistency between documentation and code of add_entry.
+  This closes issue #8.
+
+* Immediately close all file handles. This closes issue #3.
+
+easy_install
+============
+
+* Immediately close all file handles. This closes issue #3.
+
diff --git a/vendor/distribute-0.6.35/CONTRIBUTORS.txt b/vendor/distribute-0.6.35/CONTRIBUTORS.txt
new file mode 100644
index 0000000000000000000000000000000000000000..22c90aba19c744d8b34dd2cfcb1c1eb8101c2573
--- /dev/null
+++ b/vendor/distribute-0.6.35/CONTRIBUTORS.txt
@@ -0,0 +1,30 @@
+============
+Contributors
+============
+
+* Alex Grönholm
+* Alice Bevan-McGregor
+* Arfrever Frehtes Taifersar Arahesis
+* Christophe Combelles
+* Daniel Stutzbach
+* Daniel Holth
+* Hanno Schlichting
+* Jannis Leidel
+* Jason R. Coombs
+* Jim Fulton
+* Jonathan Lange
+* Justin Azoff
+* Lennart Regebro
+* Marc Abramowitz
+* Martin von Löwis
+* Noufal Ibrahim
+* Pete Hollobon
+* Philip Jenvey
+* Reinout van Rees
+* Robert Myers
+* Stefan H. Holek
+* Tarek Ziadé
+* Toshio Kuratomi
+
+If you think your name is missing, please add it (alphabetical order by first name).
+
diff --git a/vendor/distribute-0.6.35/DEVGUIDE.txt b/vendor/distribute-0.6.35/DEVGUIDE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8dcabfd1d7f60066772391a873d687392f476123
--- /dev/null
+++ b/vendor/distribute-0.6.35/DEVGUIDE.txt
@@ -0,0 +1,22 @@
+============================
+Quick notes for contributors
+============================
+
+Distribute uses Mercurial.
+
+Grab the code from Bitbucket::
+
+    $ hg clone https://bitbucket.org/tarek/distribute
+
+If you want to contribute changes, we recommend you fork the repository on
+Bitbucket, commit the changes to your repository, and then make a pull request
+on Bitbucket. When you make changes, don't forget to:
+
+- add a note in CHANGES.txt
+
+And remember that 0.6 (the only development line) is for bug fixes only, and the
+APIs should remain fully backward compatible with Setuptools.
+
+You can run the tests via::
+
+    $ python setup.py test
diff --git a/vendor/distribute-0.6.35/MANIFEST.in b/vendor/distribute-0.6.35/MANIFEST.in
new file mode 100644
index 0000000000000000000000000000000000000000..9837747a223808119196983d12531a01068991a1
--- /dev/null
+++ b/vendor/distribute-0.6.35/MANIFEST.in
@@ -0,0 +1,9 @@
+recursive-include setuptools *.py *.txt *.exe
+recursive-include tests *.py *.c *.pyx *.txt
+recursive-include setuptools/tests *.html
+recursive-include docs *.py *.txt *.conf *.css *.css_t Makefile indexsidebar.html
+recursive-include _markerlib *.py
+include *.py
+include *.txt
+include MANIFEST.in
+include launcher.c
diff --git a/vendor/distribute-0.6.35/PKG-INFO b/vendor/distribute-0.6.35/PKG-INFO
new file mode 100644
index 0000000000000000000000000000000000000000..4a0374a7ef84ab5d126d600820a7a1b2c1472ef5
--- /dev/null
+++ b/vendor/distribute-0.6.35/PKG-INFO
@@ -0,0 +1,871 @@
+Metadata-Version: 1.1
+Name: distribute
+Version: 0.6.35
+Summary: Easily download, build, install, upgrade, and uninstall Python packages
+Home-page: http://packages.python.org/distribute
+Author: The fellowship of the packaging
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+        Installing and Using Distribute
+        ===============================
+        
+        .. contents:: **Table of Contents**
+        
+        -----------
+        Disclaimers
+        -----------
+        
+        About the fork
+        ==============
+        
+        `Distribute` is a fork of the `Setuptools` project.
+        
+        Distribute is intended to replace Setuptools as the standard method
+        for working with Python module distributions.
+        
+        The fork has two goals:
+        
+        - Providing a backward compatible version to replace Setuptools
+          and make all distributions that depend on Setuptools work as
+          before, but with fewer bugs and behavioral issues.
+        
+          This work is done in the 0.6.x series.
+        
+          Starting with version 0.6.2, Distribute supports Python 3.
+          Installing and using distribute for Python 3 code works exactly
+          the same as for Python 2 code, but Distribute also helps you to support
+          Python 2 and Python 3 from the same source code by letting you run 2to3
+          on the code as a part of the build process, by setting the keyword parameter
+          ``use_2to3`` to True. See http://packages.python.org/distribute for more
+          information.
+        
+        - Refactoring the code, and releasing it in several distributions.
+          This work is being done in the 0.7.x series but not yet released.
+        
+        The roadmap is still evolving, and the page that is up-to-date is
+        located at `http://packages.python.org/distribute/roadmap`.
+        
+        If you install `Distribute` and want to switch back to `Setuptools` for
+        any reason, see the `Uninstallation instructions`_ section.
+        
+        More documentation
+        ==================
+        
+        You can get more information in the Sphinx-based documentation, located
+        at http://packages.python.org/distribute. This documentation includes the
+        old Setuptools documentation, which is slowly being replaced, as well as
+        brand-new content.
+        
+        About the installation process
+        ==============================
+        
+        The `Distribute` installer modifies your installation by de-activating an
+        existing installation of `Setuptools` in a bootstrap process. This process
+        has been tested in various installation schemes and contexts but in case of a
+        bug during this process your Python installation might be left in a broken
+        state. Since all modified files and directories are copied before the
+        installation starts, you will be able to get back to a normal state by reading
+        the instructions in the `Uninstallation instructions`_ section.
+        
+        In any case, it is recommended to save your `site-packages` directory before
+        you start the installation of `Distribute`.
+        
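+        A minimal sketch of such a backup, assuming the default `site-packages`
+        location reported by ``distutils.sysconfig`` (the backup destination name
+        is purely illustrative)::
+        
+            # Illustrative only: back up site-packages before installing Distribute.
+            import shutil
+            from distutils.sysconfig import get_python_lib
+        
+            site_packages = get_python_lib()        # default site-packages location
+            backup = site_packages + '.backup'      # hypothetical destination
+            shutil.copytree(site_packages, backup)  # copies the whole directory tree
+            print('Backed up %s to %s' % (site_packages, backup))
+        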
+        -------------------------
+        Installation Instructions
+        -------------------------
+        
+        Distribute is only released as a source distribution.
+        
+        It can be installed using pip, from the source tarball, or by using the
+        ``distribute_setup.py`` script provided online.
+        
+        ``distribute_setup.py`` is the simplest and preferred way on all systems.
+        
+        distribute_setup.py
+        ===================
+        
+        Download
+        `distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
+        and execute it, using the Python interpreter of your choice.
+        
+        If your shell has the ``curl`` program you can do::
+        
+            $ curl -O http://python-distribute.org/distribute_setup.py
+            $ python distribute_setup.py
+        
+        Notice this file is also provided in the source release.
+        
+        pip
+        ===
+        
+        Run pip::
+        
+            $ pip install distribute
+        
+        Source installation
+        ===================
+        
+        Download the source tarball, uncompress it, then run the install command::
+        
+            $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.35.tar.gz
+            $ tar -xzvf distribute-0.6.35.tar.gz
+            $ cd distribute-0.6.35
+            $ python setup.py install
+        
+        ---------------------------
+        Uninstallation Instructions
+        ---------------------------
+        
+        Like other distutils-based distributions, Distribute doesn't provide an
+        uninstaller yet. It's all done manually! We are all waiting for PEP 376
+        support in Python.
+        
+        Distribute is installed in three steps:
+        
+        1. it moves any existing installation of Setuptools out of the way
+        2. it installs a `fake` setuptools installation
+        3. it installs distribute
+        
+        Distribute can be removed like this:
+        
+        - remove the ``distribute*.egg`` file located in your site-packages directory
+        - remove the ``setuptools.pth`` file located in your site-packages directory
+        - remove the easy_install script located in your ``sys.prefix/bin`` directory
+        - remove the ``setuptools*.egg`` directory located in your site-packages directory,
+          if any.
+        
+        If you want to get back to setuptools:
+        
+        - reinstall Setuptools following its own installation instructions.
+        
+        Lastly:
+        
+        - remove the *.OLD.* directory located in your site-packages directory if any,
+          **once you have checked everything was working correctly again**.
+        
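+        As a rough aid, the following sketch merely lists the files and
+        directories mentioned above, assuming the default locations; nothing is
+        deleted, and the paths are illustrative::
+        
+            # Illustrative only: list the artifacts mentioned above (nothing is removed).
+            import glob
+            import os
+            import sys
+            from distutils.sysconfig import get_python_lib
+        
+            site_packages = get_python_lib()
+            patterns = ['distribute*.egg', 'setuptools.pth', 'setuptools*.egg', '*.OLD.*']
+            for pattern in patterns:
+                for path in glob.glob(os.path.join(site_packages, pattern)):
+                    print(path)
+            # The easy_install script usually lives in sys.prefix/bin.
+            print(os.path.join(sys.prefix, 'bin', 'easy_install'))
+        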
+        -------------------------
+        Quick help for developers
+        -------------------------
+        
+        To create an egg which is compatible with Distribute, use the same
+        practice as with Setuptools, e.g.::
+        
+            from setuptools import setup
+        
+            setup(...
+            )
+        
+        To use `pkg_resources` to access data files in the egg, you should
+        require the Setuptools distribution explicitly::
+        
+            from setuptools import setup
+        
+            setup(...
+                install_requires=['setuptools']
+            )
+        
+        Only if you need Distribute-specific functionality should you depend
+        on it explicitly. In this case, replace the Setuptools dependency::
+        
+            from setuptools import setup
+        
+            setup(...
+                install_requires=['distribute']
+            )
+        
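+        Putting these pieces together, a complete but hypothetical minimal
+        ``setup.py`` might look like this (the project name, version, and package
+        discovery shown are placeholders)::
+        
+            # Hypothetical minimal setup.py; name and version are placeholders.
+            from setuptools import setup, find_packages
+        
+            setup(
+                name='example-project',
+                version='0.1',
+                packages=find_packages(),
+                install_requires=['setuptools'],
+            )
+        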
+        -----------
+        Install FAQ
+        -----------
+        
+        - **Why is Distribute wrapping my Setuptools installation?**
+        
+           Since Distribute is a fork, and since it provides the same package
+           and modules, it renames the existing Setuptools egg and inserts a
+           new one which merely wraps the Distribute code. This way, full
+           backwards compatibility is kept for packages which rely on the
+           Setuptools modules.
+        
+           At the same time, packages can meet their dependency on Setuptools
+           without actually installing it (which would disable Distribute).
+        
+        - **How does Distribute interact with virtualenv?**
+        
+          Every time you create a virtualenv, it will install setuptools by default.
+          You either need to re-install Distribute in it right after or pass the
+          ``--distribute`` option when creating it.
+        
+          Once installed, your virtualenv will use Distribute transparently.
+        
+          However, if you have Setuptools installed in your system-wide Python,
+          and if the virtualenv you are in was generated without the `--no-site-packages`
+          option, the Distribute installation will stop.
+        
+          In this case, you need to build a virtualenv with the `--no-site-packages`
+          option or install `Distribute` globally.
+        
+        - **How does Distribute interact with zc.buildout?**
+        
+          You can use Distribute in your zc.buildout, with the --distribute option,
+          starting with zc.buildout 1.4.2::
+        
+          $ python bootstrap.py --distribute
+        
+          For previous zc.buildout versions, *the only thing* you need to do
+          is use the bootstrap at `http://python-distribute.org/bootstrap.py`.  Run
+          that bootstrap and ``bin/buildout`` (and all other buildout-generated
+          scripts) will transparently use distribute instead of setuptools.  You do
+          not need a specific buildout release.
+        
+          A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
+          left in place unmodified.  So other buildouts that do not yet use the new
+          bootstrap continue to work just fine.  And there is no need to list
+          ``distribute`` somewhere in your eggs: using the bootstrap is enough.
+        
+          The source code for the bootstrap script is located at
+          `http://bitbucket.org/tarek/buildout-distribute`.
+        
+        
+        
+        -----------------------------
+        Feedback and getting involved
+        -----------------------------
+        
+        - Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
+        - Issue tracker: http://bitbucket.org/tarek/distribute/issues/
+        - Code Repository: http://bitbucket.org/tarek/distribute
+        
+        =======
+        CHANGES
+        =======
+        
+        ------
+        0.6.35
+        ------
+        
+        Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in
+        how it parses version numbers.
+        
+        * `Issue #278`_: Restored compatibility with distribute 0.6.22 and setuptools
+          0.6. Updated the documentation to match more closely with the version
+          parsing as intended in setuptools 0.6.
+        
+        ------
+        0.6.34
+        ------
+        
+        * `Issue #341`_: 0.6.33 fails to build under Python 2.4.
+        
+        ------
+        0.6.33
+        ------
+        
+        * Fix 2 errors with Jython 2.5.
+        * Fix 1 failure with Jython 2.5 and 2.7.
+        * Disable workaround for Jython scripts on Linux systems.
+        * `Issue #336`_: `setup.py` no longer masks failure exit code when tests fail.
+        * Fix issue in pkg_resources where try/except around a platform-dependent
+          import would trigger hook load failures on Mercurial. See pull request 32
+          for details.
+        * `Issue #341`_: Fix a ResourceWarning.
+        
+        ------
+        0.6.32
+        ------
+        
+        * Fix test suite with Python 2.6.
+        * Fix some DeprecationWarnings and ResourceWarnings.
+        * `Issue #335`_: Backed out `setup_requires` superseding installed requirements
+          until the regression can be addressed.
+        
+        ------
+        0.6.31
+        ------
+        
+        * `Issue #303`_: Make sure the manifest only ever contains UTF-8 in Python 3.
+        * `Issue #329`_: Properly close files created by tests for compatibility with
+          Jython.
+        * Work around Jython bugs `#1980 <http://bugs.jython.org/issue1980>`_ and
+          `#1981 <http://bugs.jython.org/issue1981>`_.
+        * `Issue #334`_: Provide workaround for packages that reference `sys.__stdout__`
+          such as numpy does. This change should address
+          `virtualenv #359 <https://github.com/pypa/virtualenv/issues/359>`_ as long
+          as the system encoding is UTF-8 or the IO encoding is specified in the
+          environment, i.e.::
+        
+             PYTHONIOENCODING=utf8 pip install numpy
+        
+        * Fix for encoding issue when installing from Windows executable on Python 3.
+        * `Issue #323`_: Allow `setup_requires` requirements to supersede installed
+          requirements. Added some new keyword arguments to existing pkg_resources
+          methods. Also had to update how __path__ is handled for namespace packages
+          to ensure that when a new egg distribution containing a namespace package is
+          placed on sys.path, the entries in __path__ are found in the same order they
+          would have been in had that egg been on the path when pkg_resources was
+          first imported.
+        
+        ------
+        0.6.30
+        ------
+        
+        * `Issue #328`_: Clean up temporary directories in distribute_setup.py.
+        * Fix fatal bug in distribute_setup.py.
+        
+        ------
+        0.6.29
+        ------
+        
+        * Pull Request #14: Honor file permissions in zip files.
+        * `Issue #327`_: Merged pull request #24 to fix a dependency problem with pip.
+        * Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301.
+        * If Sphinx is installed, the `upload_docs` command now runs `build_sphinx`
+          to produce uploadable documentation.
+        * `Issue #326`_: `upload_docs` provided mangled auth credentials under Python 3.
+        * `Issue #320`_: Fix check for "createable" in distribute_setup.py.
+        * `Issue #305`_: Remove a warning that was triggered during normal operations.
+        * `Issue #311`_: Print metadata in UTF-8 independent of platform.
+        * `Issue #303`_: Read manifest file with UTF-8 encoding under Python 3.
+        * `Issue #301`_: Allow running tests of namespace packages when using 2to3.
+        * `Issue #304`_: Prevent import loop in site.py under Python 3.3.
+        * `Issue #283`_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3.
+        * `Issue #299`_: The develop command didn't work on Python 3, when using 2to3,
+          as the egg link would go to the Python 2 source. Linking to the 2to3'd code
+          in build/lib makes it work, although you will have to rebuild the module
+          before testing it.
+        * `Issue #306`_: Even if 2to3 is used, we build in-place under Python 2.
+        * `Issue #307`_: Prints the full path when .svn/entries is broken.
+        * `Issue #313`_: Support for sdist subcommands (Python 2.7)
+        * `Issue #314`_: test_local_index() would fail on OS X.
+        * `Issue #310`_: Non-ASCII characters in a namespace __init__.py cause errors.
+        * `Issue #218`_: Improved documentation on behavior of `package_data` and
+          `include_package_data`. Files indicated by `package_data` are now included
+          in the manifest.
+        * `distribute_setup.py` now allows a `--download-base` argument for retrieving
+          distribute from a specified location.
+        
+        ------
+        0.6.28
+        ------
+        
+        * `Issue #294`_: setup.py can now be invoked from any directory.
+        * Scripts are now installed honoring the umask.
+        * Added support for .dist-info directories.
+        * `Issue #283`_: Fix and disable scanning of `*.pyc` / `*.pyo` files on
+          Python 3.3.
+        
+        ------
+        0.6.27
+        ------
+        
+        * Support current snapshots of CPython 3.3.
+        * Distribute now recognizes README.rst as a standard, default readme file.
+        * Exclude 'encodings' modules when removing modules from sys.modules.
+          Workaround for #285.
+        * `Issue #231`_: Don't fiddle with system python when used with buildout
+          (bootstrap.py)
+        
+        ------
+        0.6.26
+        ------
+        
+        * `Issue #183`_: Symlinked files are now extracted from source distributions.
+        * `Issue #227`_: Easy_install fetch parameters are now passed during the
+          installation of a source distribution; now fulfillment of setup_requires
+          dependencies will honor the parameters passed to easy_install.
+        
+        ------
+        0.6.25
+        ------
+        
+        * `Issue #258`_: Workaround a cache issue
+        * `Issue #260`_: distribute_setup.py now accepts the --user parameter for
+          Python 2.6 and later.
+        * `Issue #262`_: package_index.open_with_auth no longer throws LookupError
+          on Python 3.
+        * `Issue #269`_: AttributeError when an exception occurs reading Manifest.in
+          on recent releases of Python.
+        * `Issue #272`_: Prevent TypeError when namespace package names are unicode
+          and single-install-externally-managed is used. Also fixes PIP `issue
+          449`_.
+        * `Issue #273`_: Legacy script launchers now install with Python2/3 support.
+        
+        ------
+        0.6.24
+        ------
+        
+        * `Issue #249`_: Added options to exclude 2to3 fixers
+        
+        ------
+        0.6.23
+        ------
+        
+        * `Issue #244`_: Fixed a test
+        * `Issue #243`_: Fixed a test
+        * `Issue #239`_: Fixed a test
+        * `Issue #240`_: Fixed a test
+        * `Issue #241`_: Fixed a test
+        * `Issue #237`_: Fixed a test
+        * `Issue #238`_: easy_install now uses 64bit executable wrappers on 64bit Python
+        * `Issue #208`_: Fixed parsed_versions; it now honors post-releases as noted in the documentation
+        * `Issue #207`_: Windows cli and gui wrappers pass CTRL-C to child python process
+        * `Issue #227`_: easy_install now passes its arguments to setup.py bdist_egg
+        * `Issue #225`_: Fixed a NameError on Python 2.5, 2.4
+        
+        ------
+        0.6.21
+        ------
+        
+        * `Issue #225`_: Fixed a regression on py2.4
+        
+        ------
+        0.6.20
+        ------
+        
+        * `Issue #135`_: Include url in warning when processing URLs in package_index.
+        * `Issue #212`_: Fix issue where easy_install fails on Python 3 with the Windows installer.
+        * `Issue #213`_: Fix typo in documentation.
+        
+        ------
+        0.6.19
+        ------
+        
+        * `Issue 206`_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders'
+        
+        ------
+        0.6.18
+        ------
+        
+        * `Issue 210`_: Fixed a regression introduced by `Issue 204`_ fix.
+        
+        ------
+        0.6.17
+        ------
+        
+        * Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment
+          variable to allow disabling installation of the easy_install-${version} script.
+        * Support Python >=3.1.4 and >=3.2.1.
+        * `Issue 204`_: Don't try to import the parent of a namespace package in
+          declare_namespace
+        * `Issue 196`_: Tolerate responses with multiple Content-Length headers
+        * `Issue 205`_: Sandboxing doesn't preserve working_set. Leads to setup_requires
+          problems.
+        
+        ------
+        0.6.16
+        ------
+        
+        * Builds sdist gztar even on Windows (avoiding `Issue 193`_).
+        * `Issue 192`_: Fixed metadata omitted on Windows when package_dir
+          specified with forward-slash.
+        * `Issue 195`_: Cython build support.
+        * `Issue 200`_: Issues with recognizing 64-bit packages on Windows.
+        
+        ------
+        0.6.15
+        ------
+        
+        * Fixed typo in bdist_egg
+        * Several issues under Python 3 have been solved.
+        * `Issue 146`_: Fixed missing DLL files after easy_install of windows exe package.
+        
+        ------
+        0.6.14
+        ------
+        
+        * `Issue 170`_: Fixed unittest failure. Thanks to Toshio.
+        * `Issue 171`_: Fixed a race condition in the unittests that caused deadlocks in the test suite.
+        * `Issue 143`_: Fixed a lookup issue with easy_install.
+          Thanks to David and Zooko.
+        * `Issue 174`_: Fixed the edit mode when it's used with setuptools itself
+        
+        ------
+        0.6.13
+        ------
+        
+        * `Issue 160`_: 2.7 gives ValueError("Invalid IPv6 URL")
+        * `Issue 150`_: Fixed using ~/.local even in a --no-site-packages virtualenv
+        * `Issue 163`_: scan index links before external links, and don't use the md5 when
+          comparing two distributions
+        
+        ------
+        0.6.12
+        ------
+        
+        * `Issue 149`_: Fixed various failures on 2.3/2.4
+        
+        ------
+        0.6.11
+        ------
+        
+        * Found another case of SandboxViolation - fixed
+        * `Issue 15`_ and 48: Introduced a socket timeout of 15 seconds on url openings
+        * Added indexsidebar.html into MANIFEST.in
+        * `Issue 108`_: Fixed TypeError with Python3.1
+        * `Issue 121`_: Fixed --help install command trying to actually install.
+        * `Issue 112`_: Added an os.makedirs so that Tarek's solution will work.
+        * `Issue 133`_: Added --no-find-links to easy_install
+        * Added easy_install --user
+        * `Issue 100`_: Fixed develop --user not taking '.' in PYTHONPATH into account
+        * `Issue 134`_: removed spurious UserWarnings. Patch by VanLindberg
+        * `Issue 138`_: cant_write_to_target error when setup_requires is used.
+        * `Issue 147`_: respect the sys.dont_write_bytecode flag
+        
+        ------
+        0.6.10
+        ------
+        
+        * Reverted change made for the DistributionNotFound exception because
+          zc.buildout uses the exception message to get the name of the
+          distribution.
+        
+        -----
+        0.6.9
+        -----
+        
+        * `Issue 90`_: unknown setuptools version can be added in the working set
+        * `Issue 87`_: setup.py doesn't try to convert distribute_setup.py anymore.
+          Initial patch by Arfrever.
+        * `Issue 89`_: added a side bar with a download link to the doc.
+        * `Issue 86`_: fixed missing sentence in pkg_resources doc.
+        * Added a nicer error message when a DistributionNotFound is raised.
+        * `Issue 80`_: test_develop now works with Python 3.1
+        * `Issue 93`_: upload_docs now works if there is an empty sub-directory.
+        * `Issue 70`_: exec bit on non-exec files
+        * `Issue 99`_: now the standalone easy_install command doesn't use a
+          "setup.cfg" if any exists in the working directory. It will use it
+          only if triggered by ``install_requires`` from a setup.py call
+          (install, develop, etc).
+        * `Issue 101`_: Allowing ``os.devnull`` in Sandbox
+        * `Issue 92`_: Fixed the "no eggs" found error with MacPort
+          (platform.mac_ver() fails)
+        * `Issue 103`_: test_get_script_header_jython_workaround not run
+          anymore under py3 with C or POSIX locale. Contributed by Arfrever.
+        * `Issue 104`_: removed the assertion when the installation fails,
+          with a nicer message for the end user.
+        * `Issue 100`_: making sure there's no SandboxViolation when
+          the setup script patches setuptools.
+        
+        -----
+        0.6.8
+        -----
+        
+        * Added "check_packages" in dist. (added in Setuptools 0.6c11)
+        * Fixed the DONT_PATCH_SETUPTOOLS state.
+        
+        -----
+        0.6.7
+        -----
+        
+        * `Issue 58`_: Added --user support to the develop command
+        * `Issue 11`_: Generated scripts now wrap their call to the script entry point
+          in the standard "if __name__ == '__main__'" block
+        * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
+          can drive an installation that doesn't patch a global setuptools.
+        * Reviewed unladen-swallow specific change from
+          http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
+          and determined that it no longer applies. Distribute should work fine with
+          Unladen Swallow 2009Q3.
+        * `Issue 21`_: Allow PackageIndex.open_url to gracefully handle all cases of a
+          httplib.HTTPException instead of just InvalidURL and BadStatusLine.
+        * Removed virtual-python.py from this distribution and updated documentation
+          to point to the actively maintained virtualenv instead.
+        * `Issue 64`_: use_setuptools no longer rebuilds the distribute egg every
+          time it is run
+        * use_setuptools now properly respects the requested version
+        * use_setuptools will no longer try to import a distribute egg for the
+          wrong Python version
+        * `Issue 74`_: no_fake should be True by default.
+        * `Issue 72`_: avoid a bootstrapping issue with easy_install -U
+        
+        -----
+        0.6.6
+        -----
+        
+        * Unified the bootstrap file so it works on both py2.x and py3k without 2to3
+          (patch by Holger Krekel)
+        
+        -----
+        0.6.5
+        -----
+        
+        * `Issue 65`_: cli.exe and gui.exe are now generated at build time,
+          depending on the platform in use.
+        
+        * `Issue 67`_: Fixed doc typo (PEP 381/382)
+        
+        * Distribute no longer shadows setuptools if we require a 0.7-series
+          setuptools.  And an error is raised when installing a 0.7 setuptools with
+          distribute.
+        
+        * When run from within buildout, no attempt is made to modify an existing
+          setuptools egg, whether in a shared egg directory or a system setuptools.
+        
+        * Fixed a hole in sandboxing allowing builtin file to write outside of
+          the sandbox.
+        
+        -----
+        0.6.4
+        -----
+        
+        * Added the generation of `distribute_setup_3k.py` during the release.
+          This closes `issue #52`_.
+        
+        * Added an upload_docs command to easily upload project documentation to
+          PyPI's http://packages.python.org. This closes `issue #56`_.
+        
+        * Fixed a bootstrap bug on the use_setuptools() API.
+        
+        -----
+        0.6.3
+        -----
+        
+        setuptools
+        ==========
+        
+        * Fixed a bunch of calls to file() that caused crashes on Python 3.
+        
+        bootstrapping
+        =============
+        
+        * Fixed a bug in sorting that caused bootstrap to fail on Python 3.
+        
+        -----
+        0.6.2
+        -----
+        
+        setuptools
+        ==========
+        
+        * Added Python 3 support; see docs/python3.txt.
+          This closes http://bugs.python.org/setuptools/issue39.
+        
+        * Added option to run 2to3 automatically when installing on Python 3.
+          This closes `issue #31`_.
+        
+        * Fixed invalid usage of requirement.parse, that broke develop -d.
+          This closes http://bugs.python.org/setuptools/issue44.
+        
+        * Fixed script launcher for 64-bit Windows.
+          This closes http://bugs.python.org/setuptools/issue2.
+        
+        * KeyError when compiling extensions.
+          This closes http://bugs.python.org/setuptools/issue41.
+        
+        bootstrapping
+        =============
+        
+        * Fixed bootstrap not working on Windows. This closes `issue #49`_.
+        
+        * Fixed 2.6 dependencies. This closes `issue #50`_.
+        
+        * Make sure setuptools is patched when running through easy_install
+          This closes http://bugs.python.org/setuptools/issue40.
+        
+        -----
+        0.6.1
+        -----
+        
+        setuptools
+        ==========
+        
+        * package_index.urlopen now catches BadStatusLine and malformed url errors.
+          This closes `issue #16`_ and `issue #18`_.
+        
+        * zip_ok is now False by default. This closes
+          http://bugs.python.org/setuptools/issue33.
+        
+        * Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
+        
+        * Fixed invalid bootstrapping with easy_install installation (`issue #40`_).
+          Thanks to Florian Schulze for the help.
+        
+        * Removed buildout/bootstrap.py. A new repository will create a specific
+          bootstrap.py script.
+        
+        
+        bootstrapping
+        =============
+        
+        * The bootstrap process leaves setuptools alone if it is detected in the system
+          and --root or --prefix is provided but does not point to the same location.
+          This closes `issue #10`_.
+        
+        ---
+        0.6
+        ---
+        
+        setuptools
+        ==========
+        
+        * Packages required at build time were not fully present at install time.
+          This closes `issue #12`_.
+        
+        * Protected against failures in tarfile extraction. This closes `issue #10`_.
+        
+        * Made Jython api_tests.txt doctest compatible. This closes `issue #7`_.
+        
+        * sandbox.py replaced builtin type file with builtin function open. This
+          closes `issue #6`_.
+        
+        * Immediately close all file handles. This closes `issue #3`_.
+        
+        * Added compatibility with Subversion 1.6. This references `issue #1`_.
+        
+        pkg_resources
+        =============
+        
+        * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
+          instead. Based on a patch from ronaldoussoren. This closes `issue #5`_.
+        
+        * Fixed a SandboxViolation for mkdir that could occur in certain cases.
+          This closes `issue #13`_.
+        
+        * Allow find_on_path on systems with tight permissions to fail gracefully.
+          This closes `issue #9`_.
+        
+        * Corrected inconsistency between documentation and code of add_entry.
+          This closes `issue #8`_.
+        
+        * Immediately close all file handles. This closes `issue #3`_.
+        
+        easy_install
+        ============
+        
+        * Immediately close all file handles. This closes `issue #3`_.
+        
+        
+        .. _`Issue #135`: http://bitbucket.org/tarek/distribute/issue/135
+        .. _`Issue #183`: http://bitbucket.org/tarek/distribute/issue/183
+        .. _`Issue #207`: http://bitbucket.org/tarek/distribute/issue/207
+        .. _`Issue #208`: http://bitbucket.org/tarek/distribute/issue/208
+        .. _`Issue #212`: http://bitbucket.org/tarek/distribute/issue/212
+        .. _`Issue #213`: http://bitbucket.org/tarek/distribute/issue/213
+        .. _`Issue #218`: http://bitbucket.org/tarek/distribute/issue/218
+        .. _`Issue #225`: http://bitbucket.org/tarek/distribute/issue/225
+        .. _`Issue #227`: http://bitbucket.org/tarek/distribute/issue/227
+        .. _`Issue #231`: http://bitbucket.org/tarek/distribute/issue/231
+        .. _`Issue #237`: http://bitbucket.org/tarek/distribute/issue/237
+        .. _`Issue #238`: http://bitbucket.org/tarek/distribute/issue/238
+        .. _`Issue #239`: http://bitbucket.org/tarek/distribute/issue/239
+        .. _`Issue #240`: http://bitbucket.org/tarek/distribute/issue/240
+        .. _`Issue #241`: http://bitbucket.org/tarek/distribute/issue/241
+        .. _`Issue #243`: http://bitbucket.org/tarek/distribute/issue/243
+        .. _`Issue #244`: http://bitbucket.org/tarek/distribute/issue/244
+        .. _`Issue #249`: http://bitbucket.org/tarek/distribute/issue/249
+        .. _`Issue #258`: http://bitbucket.org/tarek/distribute/issue/258
+        .. _`Issue #260`: http://bitbucket.org/tarek/distribute/issue/260
+        .. _`Issue #262`: http://bitbucket.org/tarek/distribute/issue/262
+        .. _`Issue #269`: http://bitbucket.org/tarek/distribute/issue/269
+        .. _`Issue #272`: http://bitbucket.org/tarek/distribute/issue/272
+        .. _`Issue #273`: http://bitbucket.org/tarek/distribute/issue/273
+        .. _`Issue #278`: http://bitbucket.org/tarek/distribute/issue/278
+        .. _`Issue #283`: http://bitbucket.org/tarek/distribute/issue/283
+        .. _`Issue #294`: http://bitbucket.org/tarek/distribute/issue/294
+        .. _`Issue #299`: http://bitbucket.org/tarek/distribute/issue/299
+        .. _`Issue #301`: http://bitbucket.org/tarek/distribute/issue/301
+        .. _`Issue #303`: http://bitbucket.org/tarek/distribute/issue/303
+        .. _`Issue #304`: http://bitbucket.org/tarek/distribute/issue/304
+        .. _`Issue #305`: http://bitbucket.org/tarek/distribute/issue/305
+        .. _`Issue #306`: http://bitbucket.org/tarek/distribute/issue/306
+        .. _`Issue #307`: http://bitbucket.org/tarek/distribute/issue/307
+        .. _`Issue #310`: http://bitbucket.org/tarek/distribute/issue/310
+        .. _`Issue #311`: http://bitbucket.org/tarek/distribute/issue/311
+        .. _`Issue #313`: http://bitbucket.org/tarek/distribute/issue/313
+        .. _`Issue #314`: http://bitbucket.org/tarek/distribute/issue/314
+        .. _`Issue #320`: http://bitbucket.org/tarek/distribute/issue/320
+        .. _`Issue #323`: http://bitbucket.org/tarek/distribute/issue/323
+        .. _`Issue #326`: http://bitbucket.org/tarek/distribute/issue/326
+        .. _`Issue #327`: http://bitbucket.org/tarek/distribute/issue/327
+        .. _`Issue #328`: http://bitbucket.org/tarek/distribute/issue/328
+        .. _`Issue #329`: http://bitbucket.org/tarek/distribute/issue/329
+        .. _`Issue #334`: http://bitbucket.org/tarek/distribute/issue/334
+        .. _`Issue #335`: http://bitbucket.org/tarek/distribute/issue/335
+        .. _`Issue #336`: http://bitbucket.org/tarek/distribute/issue/336
+        .. _`Issue #341`: http://bitbucket.org/tarek/distribute/issue/341
+        .. _`Issue 100`: http://bitbucket.org/tarek/distribute/issue/100
+        .. _`Issue 101`: http://bitbucket.org/tarek/distribute/issue/101
+        .. _`Issue 103`: http://bitbucket.org/tarek/distribute/issue/103
+        .. _`Issue 104`: http://bitbucket.org/tarek/distribute/issue/104
+        .. _`Issue 108`: http://bitbucket.org/tarek/distribute/issue/108
+        .. _`Issue 11`: http://bitbucket.org/tarek/distribute/issue/11
+        .. _`Issue 112`: http://bitbucket.org/tarek/distribute/issue/112
+        .. _`Issue 121`: http://bitbucket.org/tarek/distribute/issue/121
+        .. _`Issue 133`: http://bitbucket.org/tarek/distribute/issue/133
+        .. _`Issue 134`: http://bitbucket.org/tarek/distribute/issue/134
+        .. _`Issue 138`: http://bitbucket.org/tarek/distribute/issue/138
+        .. _`Issue 143`: http://bitbucket.org/tarek/distribute/issue/143
+        .. _`Issue 146`: http://bitbucket.org/tarek/distribute/issue/146
+        .. _`Issue 147`: http://bitbucket.org/tarek/distribute/issue/147
+        .. _`Issue 149`: http://bitbucket.org/tarek/distribute/issue/149
+        .. _`Issue 15`: http://bitbucket.org/tarek/distribute/issue/15
+        .. _`Issue 150`: http://bitbucket.org/tarek/distribute/issue/150
+        .. _`Issue 160`: http://bitbucket.org/tarek/distribute/issue/160
+        .. _`Issue 163`: http://bitbucket.org/tarek/distribute/issue/163
+        .. _`Issue 170`: http://bitbucket.org/tarek/distribute/issue/170
+        .. _`Issue 171`: http://bitbucket.org/tarek/distribute/issue/171
+        .. _`Issue 174`: http://bitbucket.org/tarek/distribute/issue/174
+        .. _`Issue 192`: http://bitbucket.org/tarek/distribute/issue/192
+        .. _`Issue 193`: http://bitbucket.org/tarek/distribute/issue/193
+        .. _`Issue 195`: http://bitbucket.org/tarek/distribute/issue/195
+        .. _`Issue 196`: http://bitbucket.org/tarek/distribute/issue/196
+        .. _`Issue 200`: http://bitbucket.org/tarek/distribute/issue/200
+        .. _`Issue 204`: http://bitbucket.org/tarek/distribute/issue/204
+        .. _`Issue 205`: http://bitbucket.org/tarek/distribute/issue/205
+        .. _`Issue 206`: http://bitbucket.org/tarek/distribute/issue/206
+        .. _`Issue 21`: http://bitbucket.org/tarek/distribute/issue/21
+        .. _`Issue 210`: http://bitbucket.org/tarek/distribute/issue/210
+        .. _`Issue 58`: http://bitbucket.org/tarek/distribute/issue/58
+        .. _`Issue 64`: http://bitbucket.org/tarek/distribute/issue/64
+        .. _`Issue 65`: http://bitbucket.org/tarek/distribute/issue/65
+        .. _`Issue 67`: http://bitbucket.org/tarek/distribute/issue/67
+        .. _`Issue 70`: http://bitbucket.org/tarek/distribute/issue/70
+        .. _`Issue 72`: http://bitbucket.org/tarek/distribute/issue/72
+        .. _`Issue 74`: http://bitbucket.org/tarek/distribute/issue/74
+        .. _`Issue 80`: http://bitbucket.org/tarek/distribute/issue/80
+        .. _`Issue 86`: http://bitbucket.org/tarek/distribute/issue/86
+        .. _`Issue 87`: http://bitbucket.org/tarek/distribute/issue/87
+        .. _`Issue 89`: http://bitbucket.org/tarek/distribute/issue/89
+        .. _`Issue 90`: http://bitbucket.org/tarek/distribute/issue/90
+        .. _`Issue 92`: http://bitbucket.org/tarek/distribute/issue/92
+        .. _`Issue 93`: http://bitbucket.org/tarek/distribute/issue/93
+        .. _`Issue 99`: http://bitbucket.org/tarek/distribute/issue/99
+        .. _`issue
+          449`: http://bitbucket.org/tarek/distribute/issue/449
+        .. _`issue #1`: http://bitbucket.org/tarek/distribute/issue/1
+        .. _`issue #10`: http://bitbucket.org/tarek/distribute/issue/10
+        .. _`issue #12`: http://bitbucket.org/tarek/distribute/issue/12
+        .. _`issue #13`: http://bitbucket.org/tarek/distribute/issue/13
+        .. _`issue #16`: http://bitbucket.org/tarek/distribute/issue/16
+        .. _`issue #18`: http://bitbucket.org/tarek/distribute/issue/18
+        .. _`issue #3`: http://bitbucket.org/tarek/distribute/issue/3
+        .. _`issue #31`: http://bitbucket.org/tarek/distribute/issue/31
+        .. _`issue #40`: http://bitbucket.org/tarek/distribute/issue/40
+        .. _`issue #49`: http://bitbucket.org/tarek/distribute/issue/49
+        .. _`issue #5`: http://bitbucket.org/tarek/distribute/issue/5
+        .. _`issue #50`: http://bitbucket.org/tarek/distribute/issue/50
+        .. _`issue #52`: http://bitbucket.org/tarek/distribute/issue/52
+        .. _`issue #56`: http://bitbucket.org/tarek/distribute/issue/56
+        .. _`issue #6`: http://bitbucket.org/tarek/distribute/issue/6
+        .. _`issue #7`: http://bitbucket.org/tarek/distribute/issue/7
+        .. _`issue #8`: http://bitbucket.org/tarek/distribute/issue/8
+        .. _`issue #9`: http://bitbucket.org/tarek/distribute/issue/9
+        
+        
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/vendor/distribute-0.6.35/README.txt b/vendor/distribute-0.6.35/README.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ea13a9bc4362099ac385be3af56f30980f1f58a7
--- /dev/null
+++ b/vendor/distribute-0.6.35/README.txt
@@ -0,0 +1,228 @@
+===============================
+Installing and Using Distribute
+===============================
+
+.. contents:: **Table of Contents**
+
+-----------
+Disclaimers
+-----------
+
+About the fork
+==============
+
+`Distribute` is a fork of the `Setuptools` project.
+
+Distribute is intended to replace Setuptools as the standard method
+for working with Python module distributions.
+
+The fork has two goals:
+
+- Providing a backward-compatible version to replace Setuptools
+  and make all distributions that depend on Setuptools work as
+  before, but with fewer bugs and behavioral issues.
+
+  This work is done in the 0.6.x series.
+
+  Starting with version 0.6.2, Distribute supports Python 3.
+  Installing and using distribute for Python 3 code works exactly
+  the same as for Python 2 code, but Distribute also helps you to support
+  Python 2 and Python 3 from the same source code by letting you run 2to3
+  on the code as a part of the build process, by setting the keyword parameter
+  ``use_2to3`` to True. See http://packages.python.org/distribute for more
+  information, and the short example following this list.
+
+- Refactoring the code, and releasing it in several distributions.
+  This work is being done in the 0.7.x series but not yet released.
+
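+For instance, a minimal ``setup.py`` that enables the 2to3 conversion
+mentioned above might look like this (an illustrative sketch; the project
+name and module list are placeholders)::
+
+    from setuptools import setup
+
+    setup(
+        name='example-package',      # placeholder project name
+        version='0.1',
+        py_modules=['example'],      # placeholder module
+        use_2to3=True,               # run 2to3 as part of the build
+    )
+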
+The roadmap is still evolving; the up-to-date version of the page is
+located at `http://packages.python.org/distribute/roadmap`.
+
+If you install `Distribute` and want to switch back to `Setuptools` for any
+reason, see the `Uninstallation instructions`_ section.
+
+More documentation
+==================
+
+You can get more information in the Sphinx-based documentation, located
+at http://packages.python.org/distribute. This documentation includes the old
+Setuptools documentation, which is slowly being replaced, as well as brand new
+content.
+
+About the installation process
+==============================
+
+The `Distribute` installer modifies your installation by de-activating an
+existing installation of `Setuptools` in a bootstrap process. This process
+has been tested in various installation schemes and contexts, but should a
+bug occur during this process, your Python installation might be left in a
+broken state. Since all modified files and directories are copied before the
+installation starts, you will be able to get back to a normal state by
+following the instructions in the `Uninstallation instructions`_ section.
+
+In any case, it is recommended to save your `site-packages` directory before
+you start the installation of `Distribute`.
+
+-------------------------
+Installation Instructions
+-------------------------
+
+Distribute is only released as a source distribution.
+
+It can be installed with pip, from the source tarball, or by using the
+``distribute_setup.py`` script provided online.
+
+``distribute_setup.py`` is the simplest and preferred way on all systems.
+
+distribute_setup.py
+===================
+
+Download
+`distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
+and execute it, using the Python interpreter of your choice.
+
+If your shell has the ``curl`` program, you can do::
+
+    $ curl -O http://python-distribute.org/distribute_setup.py
+    $ python distribute_setup.py
+
+Note that this file is also provided in the source release.
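+
+If you ship ``distribute_setup.py`` next to your project's ``setup.py``, you
+can bootstrap Distribute before importing setuptools. The sketch below follows
+the usage described in the script's own docstring; the package metadata is a
+placeholder::
+
+    from distribute_setup import use_setuptools
+    use_setuptools()
+
+    from setuptools import setup
+
+    setup(
+        name='example-package',   # placeholder
+        version='0.1',
+    )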
+
+pip
+===
+
+Run pip::
+
+    $ pip install distribute
+
+Source installation
+===================
+
+Download the source tarball, uncompress it, then run the install command::
+
+    $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.35.tar.gz
+    $ tar -xzvf distribute-0.6.35.tar.gz
+    $ cd distribute-0.6.35
+    $ python setup.py install
+
+---------------------------
+Uninstallation Instructions
+---------------------------
+
+Like other distutils-based distributions, Distribute doesn't provide an
+uninstaller yet. It's all done manually! We are all waiting for PEP 376
+support in Python.
+
+Distribute is installed in three steps:
+
+1. it moves any existing installation of Setuptools out of the way
+2. it installs a `fake` setuptools installation
+3. it installs distribute
+
+Distribute can be removed like this:
+
+- remove the ``distribute*.egg`` file located in your site-packages directory
+- remove the ``setuptools.pth`` file located in your site-packages directory
+- remove the easy_install script located in your ``sys.prefix/bin`` directory
+- remove the ``setuptools*.egg`` directory located in your site-packages
+  directory, if any.
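+
+As a rough sketch, assuming a default installation layout (the paths below
+are placeholders; adapt them to your system), the manual removal might look
+like this::
+
+    $ cd /path/to/site-packages
+    $ rm -rf distribute*.egg
+    $ rm setuptools.pth
+    $ rm -rf setuptools*.egg
+    $ rm /path/to/prefix/bin/easy_install*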
+
+If you want to get back to setuptools:
+
+- reinstall setuptools using its installation instructions.
+
+Lastly:
+
+- remove the ``*.OLD.*`` directory located in your site-packages directory if
+  any, **once you have checked that everything is working correctly again**.
+
+-------------------------
+Quick help for developers
+-------------------------
+
+To create an egg which is compatible with Distribute, use the same
+practice as with Setuptools, e.g.::
+
+    from setuptools import setup
+
+    setup(...
+    )
+
+To use `pkg_resources` to access data files in the egg, you should
+require the Setuptools distribution explicitly::
+
+    from setuptools import setup
+
+    setup(...
+        install_requires=['setuptools']
+    )
+
+Only if you need Distribute-specific functionality should you depend
+on it explicitly. In this case, replace the Setuptools dependency::
+
+    from setuptools import setup
+
+    setup(...
+        install_requires=['distribute']
+    )
+
+-----------
+Install FAQ
+-----------
+
+- **Why is Distribute wrapping my Setuptools installation?**
+
+   Since Distribute is a fork, and since it provides the same package
+   and modules, it renames the existing Setuptools egg and inserts a
+   new one which merely wraps the Distribute code. This way, full
+   backwards compatibility is kept for packages which rely on the
+   Setuptools modules.
+
+   At the same time, packages can meet their dependency on Setuptools
+   without actually installing it (which would disable Distribute).
+
+- **How does Distribute interact with virtualenv?**
+
+  Every time you create a virtualenv, it will install setuptools by default.
+  You either need to re-install Distribute in it right afterwards or pass the
+  ``--distribute`` option when creating it (see the example at the end of
+  this FAQ).
+
+  Once installed, your virtualenv will use Distribute transparently.
+
+  However, if you have Setuptools installed in your system-wide Python, and
+  if the virtualenv you are in was generated without the `--no-site-packages`
+  option, the Distribute installation will stop.
+
+  In this case you need to build a virtualenv with the `--no-site-packages`
+  option or to install `Distribute` globally.
+
+- **How does Distribute interact with zc.buildout?**
+
+  You can use Distribute in your zc.buildout, with the ``--distribute``
+  option, starting at zc.buildout 1.4.2::
+
+      $ python bootstrap.py --distribute
+
+  For previous zc.buildout versions, *the only thing* you need to do
+  is use the bootstrap at `http://python-distribute.org/bootstrap.py`.  Run
+  that bootstrap and ``bin/buildout`` (and all other buildout-generated
+  scripts) will transparently use distribute instead of setuptools.  You do
+  not need a specific buildout release.
+
+  A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
+  left in place unmodified.  So other buildouts that do not yet use the new
+  bootstrap continue to work just fine.  And there is no need to list
+  ``distribute`` somewhere in your eggs: using the bootstrap is enough.
+
+  The source code for the bootstrap script is located at
+  `http://bitbucket.org/tarek/buildout-distribute`.
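+
+As an illustration of the virtualenv answer above (assuming a virtualenv
+release that supports the ``--distribute`` flag)::
+
+    $ virtualenv --no-site-packages --distribute my-env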
+
+
+
+-----------------------------
+Feedback and getting involved
+-----------------------------
+
+- Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
+- Issue tracker: http://bitbucket.org/tarek/distribute/issues/
+- Code Repository: http://bitbucket.org/tarek/distribute
+
diff --git a/vendor/distribute-0.6.35/_markerlib/__init__.py b/vendor/distribute-0.6.35/_markerlib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2b237b1f6444537f1243a8027dc19a0d8bc7b74
--- /dev/null
+++ b/vendor/distribute-0.6.35/_markerlib/__init__.py
@@ -0,0 +1,16 @@
+try:
+    import ast
+    from _markerlib.markers import default_environment, compile, interpret
+except ImportError:
+    if 'ast' in globals():
+        raise
+    def default_environment():
+        return {}
+    def compile(marker):
+        def marker_fn(environment=None, override=None):
+            # 'empty markers are True' heuristic won't install extra deps.
+            return not marker.strip()
+        marker_fn.__doc__ = marker
+        return marker_fn
+    def interpret(marker, environment=None, override=None):
+        return compile(marker)()
diff --git a/vendor/distribute-0.6.35/_markerlib/markers.py b/vendor/distribute-0.6.35/_markerlib/markers.py
new file mode 100644
index 0000000000000000000000000000000000000000..c93d7f3b671f8234de9a08e68d5f1d194b507c55
--- /dev/null
+++ b/vendor/distribute-0.6.35/_markerlib/markers.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+"""Interpret PEP 345 environment markers.
+
+EXPR [in|==|!=|not in] EXPR [or|and] ...
+
+where EXPR belongs to any of those:
+
+    python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+    python_full_version = sys.version.split()[0]
+    os.name = os.name
+    sys.platform = sys.platform
+    platform.version = platform.version()
+    platform.machine = platform.machine()
+    platform.python_implementation = platform.python_implementation()
+    a free string, like '2.6', or 'win32'
+"""
+
+__all__ = ['default_environment', 'compile', 'interpret']
+
+import ast
+import os
+import platform
+import sys
+import weakref
+
+_builtin_compile = compile
+
+try:
+    from platform import python_implementation
+except ImportError:
+    if os.name == "java":
+        # Jython 2.5 has ast module, but not platform.python_implementation() function.
+        def python_implementation():
+            return "Jython"
+    else:
+        raise
+
+
+# restricted set of variables
+_VARS = {'sys.platform': sys.platform,
+         'python_version': '%s.%s' % sys.version_info[:2],
+         # FIXME parsing sys.platform is not reliable, but there is no other
+         # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
+         'python_full_version': sys.version.split(' ', 1)[0],
+         'os.name': os.name,
+         'platform.version': platform.version(),
+         'platform.machine': platform.machine(),
+         'platform.python_implementation': python_implementation(),
+         'extra': None # wheel extension
+        }
+
+def default_environment():
+    """Return copy of default PEP 385 globals dictionary."""
+    return dict(_VARS)
+
+class ASTWhitelist(ast.NodeTransformer):
+    def __init__(self, statement):
+        self.statement = statement # for error messages
+
+    ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
+    # Bool operations
+    ALLOWED += (ast.And, ast.Or)
+    # Comparison operations
+    ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
+
+    def visit(self, node):
+        """Ensure statement only contains allowed nodes."""
+        if not isinstance(node, self.ALLOWED):
+            raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
+                               (self.statement,
+                               (' ' * node.col_offset) + '^'))
+        return ast.NodeTransformer.visit(self, node)
+
+    def visit_Attribute(self, node):
+        """Flatten one level of attribute access."""
+        new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
+        return ast.copy_location(new_node, node)
+
+def parse_marker(marker):
+    tree = ast.parse(marker, mode='eval')
+    new_tree = ASTWhitelist(marker).generic_visit(tree)
+    return new_tree
+
+def compile_marker(parsed_marker):
+    return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
+                   dont_inherit=True)
+
+_cache = weakref.WeakValueDictionary()
+
+def compile(marker):
+    """Return compiled marker as a function accepting an environment dict."""
+    try:
+        return _cache[marker]
+    except KeyError:
+        pass
+    if not marker.strip():
+        def marker_fn(environment=None, override=None):
+            """"""
+            return True
+    else:
+        compiled_marker = compile_marker(parse_marker(marker))
+        def marker_fn(environment=None, override=None):
+            """override updates environment"""
+            if override is None:
+                override = {}
+            if environment is None:
+                environment = default_environment()
+            environment.update(override)
+            return eval(compiled_marker, environment)
+    marker_fn.__doc__ = marker
+    _cache[marker] = marker_fn
+    return _cache[marker]
+
+def interpret(marker, environment=None):
+    return compile(marker)(environment)
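+
+
+if __name__ == '__main__':
+    # Illustrative usage only, not part of the packaging machinery: evaluate
+    # a couple of arbitrary example markers against the default environment
+    # and against an explicitly supplied environment dictionary.
+    print(interpret("python_version >= '2.6'"))
+    print(interpret("os.name == 'posix'", default_environment()))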
diff --git a/vendor/distribute-0.6.35/distribute_setup.py b/vendor/distribute-0.6.35/distribute_setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..a447f7ec8f568ee6b31a7a1c9136d74ca808d793
--- /dev/null
+++ b/vendor/distribute-0.6.35/distribute_setup.py
@@ -0,0 +1,546 @@
+#!python
+"""Bootstrap distribute installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+    from distribute_setup import use_setuptools
+    use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import shutil
+import sys
+import time
+import fnmatch
+import tempfile
+import tarfile
+import optparse
+
+from distutils import log
+
+try:
+    from site import USER_SITE
+except ImportError:
+    USER_SITE = None
+
+try:
+    import subprocess
+
+    def _python_cmd(*args):
+        args = (sys.executable,) + args
+        return subprocess.call(args) == 0
+
+except ImportError:
+    # will be used for python 2.3
+    def _python_cmd(*args):
+        args = (sys.executable,) + args
+        # quoting arguments if windows
+        if sys.platform == 'win32':
+            def quote(arg):
+                if ' ' in arg:
+                    return '"%s"' % arg
+                return arg
+            args = [quote(arg) for arg in args]
+        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
+
+DEFAULT_VERSION = "0.6.35"
+DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
+SETUPTOOLS_FAKED_VERSION = "0.6c11"
+
+SETUPTOOLS_PKG_INFO = """\
+Metadata-Version: 1.0
+Name: setuptools
+Version: %s
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
+""" % SETUPTOOLS_FAKED_VERSION
+
+
+def _install(tarball, install_args=()):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # installing
+        log.warn('Installing Distribute')
+        if not _python_cmd('setup.py', 'install', *install_args):
+            log.warn('Something went wrong during the installation.')
+            log.warn('See the error message above.')
+            # exitcode will be 2
+            return 2
+    finally:
+        os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
+
+
+def _build_egg(egg, tarball, to_dir):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # building an egg
+        log.warn('Building a Distribute egg in %s', to_dir)
+        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+    finally:
+        os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
+    # returning the result
+    log.warn(egg)
+    if not os.path.exists(egg):
+        raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
+                       % (version, sys.version_info[0], sys.version_info[1]))
+    if not os.path.exists(egg):
+        tarball = download_setuptools(version, download_base,
+                                      to_dir, download_delay)
+        _build_egg(egg, tarball, to_dir)
+    sys.path.insert(0, egg)
+    import setuptools
+    setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                   to_dir=os.curdir, download_delay=15, no_fake=True):
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    was_imported = 'pkg_resources' in sys.modules or \
+        'setuptools' in sys.modules
+    try:
+        try:
+            import pkg_resources
+            if not hasattr(pkg_resources, '_distribute'):
+                if not no_fake:
+                    _fake_setuptools()
+                raise ImportError
+        except ImportError:
+            return _do_download(version, download_base, to_dir, download_delay)
+        try:
+            pkg_resources.require("distribute>=" + version)
+            return
+        except pkg_resources.VersionConflict:
+            e = sys.exc_info()[1]
+            if was_imported:
+                sys.stderr.write(
+                "The required version of distribute (>=%s) is not available,\n"
+                "and can't be installed while this script is running. Please\n"
+                "install a more recent version first, using\n"
+                "'easy_install -U distribute'."
+                "\n\n(Currently using %r)\n" % (version, e.args[0]))
+                sys.exit(2)
+            else:
+                del pkg_resources, sys.modules['pkg_resources']    # reload ok
+                return _do_download(version, download_base, to_dir,
+                                    download_delay)
+        except pkg_resources.DistributionNotFound:
+            return _do_download(version, download_base, to_dir,
+                                download_delay)
+    finally:
+        if not no_fake:
+            _create_fake_setuptools_pkg_info(to_dir)
+
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                        to_dir=os.curdir, delay=15):
+    """Download distribute from a specified location and return its filename
+
+    `version` should be a valid distribute version number that is available
+    as an egg for download under the `download_base` URL (which should end
+    with a '/'). `to_dir` is the directory where the egg will be downloaded.
+    `delay` is the number of seconds to pause before an actual download
+    attempt.
+    """
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    try:
+        from urllib.request import urlopen
+    except ImportError:
+        from urllib2 import urlopen
+    tgz_name = "distribute-%s.tar.gz" % version
+    url = download_base + tgz_name
+    saveto = os.path.join(to_dir, tgz_name)
+    src = dst = None
+    if not os.path.exists(saveto):  # Avoid repeated downloads
+        try:
+            log.warn("Downloading %s", url)
+            src = urlopen(url)
+            # Read/write all in one block, so we don't create a corrupt file
+            # if the download is interrupted.
+            data = src.read()
+            dst = open(saveto, "wb")
+            dst.write(data)
+        finally:
+            if src:
+                src.close()
+            if dst:
+                dst.close()
+    return os.path.realpath(saveto)
+
+
+def _no_sandbox(function):
+    def __no_sandbox(*args, **kw):
+        try:
+            from setuptools.sandbox import DirectorySandbox
+            if not hasattr(DirectorySandbox, '_old'):
+                def violation(*args):
+                    pass
+                DirectorySandbox._old = DirectorySandbox._violation
+                DirectorySandbox._violation = violation
+                patched = True
+            else:
+                patched = False
+        except ImportError:
+            patched = False
+
+        try:
+            return function(*args, **kw)
+        finally:
+            if patched:
+                DirectorySandbox._violation = DirectorySandbox._old
+                del DirectorySandbox._old
+
+    return __no_sandbox
+
+
+def _patch_file(path, content):
+    """Will backup the file then patch it"""
+    f = open(path)
+    existing_content = f.read()
+    f.close()
+    if existing_content == content:
+        # already patched
+        log.warn('Already patched.')
+        return False
+    log.warn('Patching...')
+    _rename_path(path)
+    f = open(path, 'w')
+    try:
+        f.write(content)
+    finally:
+        f.close()
+    return True
+
+_patch_file = _no_sandbox(_patch_file)
+
+
+def _same_content(path, content):
+    f = open(path)
+    existing_content = f.read()
+    f.close()
+    return existing_content == content
+
+
+def _rename_path(path):
+    new_name = path + '.OLD.%s' % time.time()
+    log.warn('Renaming %s to %s', path, new_name)
+    os.rename(path, new_name)
+    return new_name
+
+
+def _remove_flat_installation(placeholder):
+    if not os.path.isdir(placeholder):
+        log.warn('Unknown installation at %s', placeholder)
+        return False
+    found = False
+    for file in os.listdir(placeholder):
+        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
+            found = True
+            break
+    if not found:
+        log.warn('Could not locate setuptools*.egg-info')
+        return
+
+    log.warn('Moving elements out of the way...')
+    pkg_info = os.path.join(placeholder, file)
+    if os.path.isdir(pkg_info):
+        patched = _patch_egg_dir(pkg_info)
+    else:
+        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
+
+    if not patched:
+        log.warn('%s already patched.', pkg_info)
+        return False
+    # now let's move the files out of the way
+    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
+        element = os.path.join(placeholder, element)
+        if os.path.exists(element):
+            _rename_path(element)
+        else:
+            log.warn('Could not find the %s element of the '
+                     'Setuptools distribution', element)
+    return True
+
+_remove_flat_installation = _no_sandbox(_remove_flat_installation)
+
+
+def _after_install(dist):
+    log.warn('After install bootstrap.')
+    placeholder = dist.get_command_obj('install').install_purelib
+    _create_fake_setuptools_pkg_info(placeholder)
+
+
+def _create_fake_setuptools_pkg_info(placeholder):
+    if not placeholder or not os.path.exists(placeholder):
+        log.warn('Could not find the install location')
+        return
+    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
+            (SETUPTOOLS_FAKED_VERSION, pyver)
+    pkg_info = os.path.join(placeholder, setuptools_file)
+    if os.path.exists(pkg_info):
+        log.warn('%s already exists', pkg_info)
+        return
+
+    log.warn('Creating %s', pkg_info)
+    try:
+        f = open(pkg_info, 'w')
+    except EnvironmentError:
+        log.warn("Don't have permissions to write %s, skipping", pkg_info)
+        return
+    try:
+        f.write(SETUPTOOLS_PKG_INFO)
+    finally:
+        f.close()
+
+    pth_file = os.path.join(placeholder, 'setuptools.pth')
+    log.warn('Creating %s', pth_file)
+    f = open(pth_file, 'w')
+    try:
+        f.write(os.path.join(os.curdir, setuptools_file))
+    finally:
+        f.close()
+
+_create_fake_setuptools_pkg_info = _no_sandbox(
+    _create_fake_setuptools_pkg_info
+)
+
+
+def _patch_egg_dir(path):
+    # let's check if it's already patched
+    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    if os.path.exists(pkg_info):
+        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
+            log.warn('%s already patched.', pkg_info)
+            return False
+    _rename_path(path)
+    os.mkdir(path)
+    os.mkdir(os.path.join(path, 'EGG-INFO'))
+    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    f = open(pkg_info, 'w')
+    try:
+        f.write(SETUPTOOLS_PKG_INFO)
+    finally:
+        f.close()
+    return True
+
+_patch_egg_dir = _no_sandbox(_patch_egg_dir)
+
+
+def _before_install():
+    log.warn('Before install bootstrap.')
+    _fake_setuptools()
+
+
+def _under_prefix(location):
+    if 'install' not in sys.argv:
+        return True
+    args = sys.argv[sys.argv.index('install') + 1:]
+    for index, arg in enumerate(args):
+        for option in ('--root', '--prefix'):
+            if arg.startswith('%s=' % option):
+                # take everything after '=' for either --root= or --prefix=
+                top_dir = arg.split('=', 1)[-1]
+                return location.startswith(top_dir)
+            elif arg == option:
+                if len(args) > index:
+                    top_dir = args[index + 1]
+                    return location.startswith(top_dir)
+        if arg == '--user' and USER_SITE is not None:
+            return location.startswith(USER_SITE)
+    return True
+
+
+def _fake_setuptools():
+    log.warn('Scanning installed packages')
+    try:
+        import pkg_resources
+    except ImportError:
+        # we're cool
+        log.warn('Setuptools or Distribute does not seem to be installed.')
+        return
+    ws = pkg_resources.working_set
+    try:
+        setuptools_dist = ws.find(
+            pkg_resources.Requirement.parse('setuptools', replacement=False)
+            )
+    except TypeError:
+        # old distribute API
+        setuptools_dist = ws.find(
+            pkg_resources.Requirement.parse('setuptools')
+        )
+
+    if setuptools_dist is None:
+        log.warn('No setuptools distribution found')
+        return
+    # detecting if it was already faked
+    setuptools_location = setuptools_dist.location
+    log.warn('Setuptools installation detected at %s', setuptools_location)
+
+    # if --root or --prefix was provided, and if
+    # setuptools is not located in them, we don't patch it
+    if not _under_prefix(setuptools_location):
+        log.warn('Not patching, --root or --prefix is installing Distribute'
+                 ' in another location')
+        return
+
+    # let's see if it's an egg
+    if not setuptools_location.endswith('.egg'):
+        log.warn('Non-egg installation')
+        res = _remove_flat_installation(setuptools_location)
+        if not res:
+            return
+    else:
+        log.warn('Egg installation')
+        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
+        if (os.path.exists(pkg_info) and
+            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
+            log.warn('Already patched.')
+            return
+        log.warn('Patching...')
+        # let's create a fake egg replacing the setuptools one
+        res = _patch_egg_dir(setuptools_location)
+        if not res:
+            return
+    log.warn('Patching complete.')
+    _relaunch()
+
+
+def _relaunch():
+    log.warn('Relaunching...')
+    # we have to relaunch the process
+    # pip marker to avoid a relaunch bug
+    _cmd1 = ['-c', 'install', '--single-version-externally-managed']
+    _cmd2 = ['-c', 'install', '--record']
+    if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
+        sys.argv[0] = 'setup.py'
+    args = [sys.executable] + sys.argv
+    sys.exit(subprocess.call(args))
+
+
+def _extractall(self, path=".", members=None):
+    """Extract all members from the archive to the current working
+       directory and set owner, modification time and permissions on
+       directories afterwards. `path' specifies a different directory
+       to extract to. `members' is optional and must be a subset of the
+       list returned by getmembers().
+    """
+    import copy
+    import operator
+    from tarfile import ExtractError
+    directories = []
+
+    if members is None:
+        members = self
+
+    for tarinfo in members:
+        if tarinfo.isdir():
+            # Extract directories with a safe mode.
+            directories.append(tarinfo)
+            tarinfo = copy.copy(tarinfo)
+            tarinfo.mode = 448  # decimal for oct 0700
+        self.extract(tarinfo, path)
+
+    # Reverse sort directories.
+    if sys.version_info < (2, 4):
+        def sorter(dir1, dir2):
+            return cmp(dir1.name, dir2.name)
+        directories.sort(sorter)
+        directories.reverse()
+    else:
+        directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+    # Set correct owner, mtime and filemode on directories.
+    for tarinfo in directories:
+        dirpath = os.path.join(path, tarinfo.name)
+        try:
+            self.chown(tarinfo, dirpath)
+            self.utime(tarinfo, dirpath)
+            self.chmod(tarinfo, dirpath)
+        except ExtractError:
+            e = sys.exc_info()[1]
+            if self.errorlevel > 1:
+                raise
+            else:
+                self._dbg(1, "tarfile: %s" % e)
+
+
+def _build_install_args(options):
+    """
+    Build the arguments to 'python setup.py install' on the distribute package
+    """
+    install_args = []
+    if options.user_install:
+        if sys.version_info < (2, 6):
+            log.warn("--user requires Python 2.6 or later")
+            raise SystemExit(1)
+        install_args.append('--user')
+    return install_args
+
+def _parse_args():
+    """
+    Parse the command line for options
+    """
+    parser = optparse.OptionParser()
+    parser.add_option(
+        '--user', dest='user_install', action='store_true', default=False,
+        help='install in user site package (requires Python 2.6 or later)')
+    parser.add_option(
+        '--download-base', dest='download_base', metavar="URL",
+        default=DEFAULT_URL,
+        help='alternative URL from where to download the distribute package')
+    options, args = parser.parse_args()
+    # positional arguments are ignored
+    return options
+
+def main(version=DEFAULT_VERSION):
+    """Install or upgrade setuptools and EasyInstall"""
+    options = _parse_args()
+    tarball = download_setuptools(download_base=options.download_base)
+    return _install(tarball, _build_install_args(options))
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/vendor/distribute-0.6.35/docs/Makefile b/vendor/distribute-0.6.35/docs/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..30bf10a930f084877b9b19d035651687e5a875de
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/Makefile
@@ -0,0 +1,75 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html web pickle htmlhelp latex changes linkcheck
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html      to make standalone HTML files"
+	@echo "  pickle    to make pickle files"
+	@echo "  json      to make JSON files"
+	@echo "  htmlhelp  to make HTML files and a HTML help project"
+	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  changes   to make an overview over all changed/added/deprecated items"
+	@echo "  linkcheck to check all external links for integrity"
+
+clean:
+	-rm -rf build/*
+
+html:
+	mkdir -p build/html build/doctrees
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
+	@echo
+	@echo "Build finished. The HTML pages are in build/html."
+
+pickle:
+	mkdir -p build/pickle build/doctrees
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+web: pickle
+
+json:
+	mkdir -p build/json build/doctrees
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	mkdir -p build/htmlhelp build/doctrees
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in build/htmlhelp."
+
+latex:
+	mkdir -p build/latex build/doctrees
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in build/latex."
+	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
+	      "run these through (pdf)latex."
+
+changes:
+	mkdir -p build/changes build/doctrees
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
+	@echo
+	@echo "The overview file is in build/changes."
+
+linkcheck:
+	mkdir -p build/linkcheck build/doctrees
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in build/linkcheck/output.txt."
diff --git a/vendor/distribute-0.6.35/docs/_templates/indexsidebar.html b/vendor/distribute-0.6.35/docs/_templates/indexsidebar.html
new file mode 100644
index 0000000000000000000000000000000000000000..932909f3e100eda84b017d1fbb38c81d488c1d44
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/_templates/indexsidebar.html
@@ -0,0 +1,8 @@
+<h3>Download</h3> 
+
+<p>Current version: <b>{{ version }}</b></p> 
+<p>Get Distribute from the <a href="http://pypi.python.org/pypi/distribute">Python Package Index</a></p>
+
+<h3>Questions? Suggestions? Contributions?</h3>
+
+<p>Visit the <a href="http://bitbucket.org/tarek/distribute">Distribute project page</a> </p>
diff --git a/vendor/distribute-0.6.35/docs/_theme/nature/static/nature.css_t b/vendor/distribute-0.6.35/docs/_theme/nature/static/nature.css_t
new file mode 100644
index 0000000000000000000000000000000000000000..1a654264d17b66f2098c1a74978c4e7bdfaf17fd
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/_theme/nature/static/nature.css_t
@@ -0,0 +1,237 @@
+/**
+ * Sphinx stylesheet -- default theme
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ */
+ 
+@import url("basic.css");
+ 
+/* -- page layout ----------------------------------------------------------- */
+ 
+body {
+    font-family: Arial, sans-serif;
+    font-size: 100%;
+    background-color: #111111;
+    color: #555555;
+    margin: 0;
+    padding: 0;
+}
+
+div.documentwrapper {
+    float: left;
+    width: 100%;
+}
+
+div.bodywrapper {
+    margin: 0 0 0 300px;
+}
+
+hr{
+    border: 1px solid #B1B4B6;
+}
+ 
+div.document {
+    background-color: #fafafa;
+}
+ 
+div.body {
+    background-color: #ffffff;
+    color: #3E4349;
+    padding: 1em 30px 30px 30px;
+    font-size: 0.9em;
+}
+ 
+div.footer {
+    color: #555;
+    width: 100%;
+    padding: 13px 0;
+    text-align: center;
+    font-size: 75%;
+}
+ 
+div.footer a {
+    color: #444444;
+}
+ 
+div.related {
+    background-color: #6BA81E;
+    line-height: 36px;
+    color: #ffffff;
+    text-shadow: 0px 1px 0 #444444;
+    font-size: 1.1em;
+}
+ 
+div.related a {
+    color: #E2F3CC;
+}
+
+div.related .right {
+    font-size: 0.9em;
+}
+
+div.sphinxsidebar {
+    font-size: 0.9em;
+    line-height: 1.5em;
+    width: 300px;
+}
+
+div.sphinxsidebarwrapper{
+    padding: 20px 0;
+}
+ 
+div.sphinxsidebar h3,
+div.sphinxsidebar h4 {
+    font-family: Arial, sans-serif;
+    color: #222222;
+    font-size: 1.2em;
+    font-weight: bold;
+    margin: 0;
+    padding: 5px 10px;
+    text-shadow: 1px 1px 0 white
+}
+
+div.sphinxsidebar h3 a {
+    color: #444444;
+}
+
+div.sphinxsidebar p {
+    color: #888888;
+    padding: 5px 20px;
+    margin: 0.5em 0px;
+}
+ 
+div.sphinxsidebar p.topless {
+}
+ 
+div.sphinxsidebar ul {
+    margin: 10px 10px 10px 20px;
+    padding: 0;
+    color: #000000;
+}
+ 
+div.sphinxsidebar a {
+    color: #444444;
+}
+
+div.sphinxsidebar a:hover {
+    color: #E32E00;
+}
+
+div.sphinxsidebar input {
+    border: 1px solid #cccccc;
+    font-family: sans-serif;
+    font-size: 1.1em;
+    padding: 0.15em 0.3em;
+}
+
+div.sphinxsidebar input[type=text]{
+    margin-left: 20px;
+}
+ 
+/* -- body styles ----------------------------------------------------------- */
+ 
+a {
+    color: #005B81;
+    text-decoration: none;
+}
+ 
+a:hover {
+    color: #E32E00;
+}
+ 
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+    font-family: Arial, sans-serif;
+    font-weight: normal;
+    color: #212224;
+    margin: 30px 0px 10px 0px;
+    padding: 5px 0 5px 0px;
+    text-shadow: 0px 1px 0 white;
+    border-bottom: 1px solid #C8D5E3;
+}
+ 
+div.body h1 { margin-top: 0; font-size: 200%; }
+div.body h2 { font-size: 150%; }
+div.body h3 { font-size: 120%; }
+div.body h4 { font-size: 110%; }
+div.body h5 { font-size: 100%; }
+div.body h6 { font-size: 100%; }
+ 
+a.headerlink {
+    color: #c60f0f;
+    font-size: 0.8em;
+    padding: 0 4px 0 4px;
+    text-decoration: none;
+}
+ 
+a.headerlink:hover {
+    background-color: #c60f0f;
+    color: white;
+}
+ 
+div.body p, div.body dd, div.body li {
+    line-height: 1.8em;
+}
+ 
+div.admonition p.admonition-title + p {
+    display: inline;
+}
+
+div.highlight{
+    background-color: white;
+}
+
+div.note {
+    background-color: #eeeeee;
+    border: 1px solid #cccccc;
+}
+ 
+div.seealso {
+    background-color: #ffffcc;
+    border: 1px solid #ffff66;
+}
+ 
+div.topic {
+    background-color: #fafafa;
+    border-width: 0;
+}
+ 
+div.warning {
+    background-color: #ffe4e4;
+    border: 1px solid #ff6666;
+}
+ 
+p.admonition-title {
+    display: inline;
+}
+ 
+p.admonition-title:after {
+    content: ":";
+}
+ 
+pre {
+    padding: 10px;
+    background-color: #fafafa;
+    color: #222222;
+    line-height: 1.5em;
+    font-size: 1.1em;
+    margin: 1.5em 0 1.5em 0;
+    -webkit-box-shadow: 0px 0px 4px #d8d8d8;
+    -moz-box-shadow: 0px 0px 4px #d8d8d8;
+    box-shadow: 0px 0px 4px #d8d8d8;
+}
+ 
+tt {
+    color: #222222;
+    padding: 1px 2px;
+    font-size: 1.2em;
+    font-family: monospace;
+}
+
+#table-of-contents ul {
+    padding-left: 2em;
+}
+
diff --git a/vendor/distribute-0.6.35/docs/_theme/nature/static/pygments.css b/vendor/distribute-0.6.35/docs/_theme/nature/static/pygments.css
new file mode 100644
index 0000000000000000000000000000000000000000..652b76128b6a174f3407a50fff8735896f47d863
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/_theme/nature/static/pygments.css
@@ -0,0 +1,54 @@
+.c { color: #999988; font-style: italic } /* Comment */
+.k { font-weight: bold } /* Keyword */
+.o { font-weight: bold } /* Operator */
+.cm { color: #999988; font-style: italic } /* Comment.Multiline */
+.cp { color: #999999; font-weight: bold } /* Comment.preproc */
+.c1 { color: #999988; font-style: italic } /* Comment.Single */
+.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
+.ge { font-style: italic } /* Generic.Emph */
+.gr { color: #aa0000 } /* Generic.Error */
+.gh { color: #999999 } /* Generic.Heading */
+.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
+.go { color: #111 } /* Generic.Output */
+.gp { color: #555555 } /* Generic.Prompt */
+.gs { font-weight: bold } /* Generic.Strong */
+.gu { color: #aaaaaa } /* Generic.Subheading */
+.gt { color: #aa0000 } /* Generic.Traceback */
+.kc { font-weight: bold } /* Keyword.Constant */
+.kd { font-weight: bold } /* Keyword.Declaration */
+.kp { font-weight: bold } /* Keyword.Pseudo */
+.kr { font-weight: bold } /* Keyword.Reserved */
+.kt { color: #445588; font-weight: bold } /* Keyword.Type */
+.m { color: #009999 } /* Literal.Number */
+.s { color: #bb8844 } /* Literal.String */
+.na { color: #008080 } /* Name.Attribute */
+.nb { color: #999999 } /* Name.Builtin */
+.nc { color: #445588; font-weight: bold } /* Name.Class */
+.no { color: #ff99ff } /* Name.Constant */
+.ni { color: #800080 } /* Name.Entity */
+.ne { color: #990000; font-weight: bold } /* Name.Exception */
+.nf { color: #990000; font-weight: bold } /* Name.Function */
+.nn { color: #555555 } /* Name.Namespace */
+.nt { color: #000080 } /* Name.Tag */
+.nv { color: purple } /* Name.Variable */
+.ow { font-weight: bold } /* Operator.Word */
+.mf { color: #009999 } /* Literal.Number.Float */
+.mh { color: #009999 } /* Literal.Number.Hex */
+.mi { color: #009999 } /* Literal.Number.Integer */
+.mo { color: #009999 } /* Literal.Number.Oct */
+.sb { color: #bb8844 } /* Literal.String.Backtick */
+.sc { color: #bb8844 } /* Literal.String.Char */
+.sd { color: #bb8844 } /* Literal.String.Doc */
+.s2 { color: #bb8844 } /* Literal.String.Double */
+.se { color: #bb8844 } /* Literal.String.Escape */
+.sh { color: #bb8844 } /* Literal.String.Heredoc */
+.si { color: #bb8844 } /* Literal.String.Interpol */
+.sx { color: #bb8844 } /* Literal.String.Other */
+.sr { color: #808000 } /* Literal.String.Regex */
+.s1 { color: #bb8844 } /* Literal.String.Single */
+.ss { color: #bb8844 } /* Literal.String.Symbol */
+.bp { color: #999999 } /* Name.Builtin.Pseudo */
+.vc { color: #ff99ff } /* Name.Variable.Class */
+.vg { color: #ff99ff } /* Name.Variable.Global */
+.vi { color: #ff99ff } /* Name.Variable.Instance */
+.il { color: #009999 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/vendor/distribute-0.6.35/docs/_theme/nature/theme.conf b/vendor/distribute-0.6.35/docs/_theme/nature/theme.conf
new file mode 100644
index 0000000000000000000000000000000000000000..1cc40044646bb73870088ddc88543c58a3ca083e
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/_theme/nature/theme.conf
@@ -0,0 +1,4 @@
+[theme]
+inherit = basic
+stylesheet = nature.css
+pygments_style = tango
diff --git a/vendor/distribute-0.6.35/docs/conf.py b/vendor/distribute-0.6.35/docs/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..98380ba9d61a26d83c922c65e1f93abcccbd3cd4
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/conf.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8 -*-
+#
+# Distribute documentation build configuration file, created by
+# sphinx-quickstart on Fri Jul 17 14:22:37 2009.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.append(os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.txt'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Distribute'
+copyright = u'2009-2011, The fellowship of the packaging'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.6.35'
+# The full version, including alpha/beta/rc tags.
+release = '0.6.35'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = ['_theme']
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+html_title = "Distribute documentation"
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+html_short_title = "Distribute"
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+html_sidebars = {'index': 'indexsidebar.html'}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+html_use_modindex = False
+
+# If false, no index is generated.
+html_use_index = False
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Distributedoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'Distribute.tex', ur'Distribute Documentation',
+   ur'The fellowship of the packaging', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/vendor/distribute-0.6.35/docs/easy_install.txt b/vendor/distribute-0.6.35/docs/easy_install.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9b4fcfbb6e673e9c97de8503d0deeab602f1594c
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/easy_install.txt
@@ -0,0 +1,1597 @@
+============
+Easy Install
+============
+
+Easy Install is a Python module (``easy_install``) bundled with ``setuptools``
+that lets you automatically download, build, install, and manage Python
+packages.
+
+Please share your experiences with us! If you encounter difficulty installing
+a package, please contact us via the `distutils mailing list
+<http://mail.python.org/pipermail/distutils-sig/>`_.  (Note: please DO NOT send
+private email directly to the author of setuptools; it will be discarded.  The
+mailing list is a searchable archive of previously-asked and answered
+questions; you should begin your research there before reporting something as a
+bug -- and then do so via list discussion first.)
+
+(Also, if you'd like to learn about how you can use ``setuptools`` to make your
+own packages work better with EasyInstall, or provide EasyInstall-like features
+without requiring your users to use EasyInstall directly, you'll probably want
+to check out the full `setuptools`_ documentation as well.)
+
+.. contents:: **Table of Contents**
+
+
+Using "Easy Install"
+====================
+
+
+.. _installation instructions:
+
+Installing "Easy Install"
+-------------------------
+
+Please see the `setuptools PyPI page <http://pypi.python.org/pypi/setuptools>`_
+for download links and basic installation instructions for each of the
+supported platforms.
+
+You will need at least Python 2.3.5, or if you are on a 64-bit platform, Python
+2.4.  An ``easy_install`` script will be installed in the normal location for
+Python scripts on your platform.
+
+Note that the instructions on the setuptools PyPI page assume that you are
+installing to Python's primary ``site-packages`` directory.  If this is
+not the case, you should consult the section below on `Custom Installation
+Locations`_ before installing.  (And, on Windows, you should not use the
+``.exe`` installer when installing to an alternate location.)
+
+Note that ``easy_install`` normally works by downloading files from the
+internet.  If you are behind an NTLM-based firewall that prevents Python
+programs from accessing the net directly, you may wish to first install and use
+the `APS proxy server <http://ntlmaps.sf.net/>`_, which lets you get past such
+firewalls in the same way that your web browser(s) do.
+
+(Alternatively, if you do not wish easy_install to actually download anything, you
+can restrict it from doing so with the ``--allow-hosts`` option; see the
+sections on `restricting downloads with --allow-hosts`_ and `command-line
+options`_ for more details.)
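+
+For example, to limit downloads to hosts you trust (an illustrative command;
+the host pattern is a placeholder)::
+
+    easy_install --allow-hosts=*.python.org SQLObject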
+
+
+Troubleshooting
+~~~~~~~~~~~~~~~
+
+If EasyInstall/setuptools appears to install correctly, and you can run the
+``easy_install`` command but it fails with an ``ImportError``, the most likely
+cause is that you installed to a location other than ``site-packages``,
+without taking any of the steps described in the `Custom Installation
+Locations`_ section below.  Please see that section and follow the steps to
+make sure that your custom location will work correctly.  Then re-install.
+
+Similarly, if you can run ``easy_install``, and it appears to be installing
+packages, but then you can't import them, the most likely issue is that you
+installed EasyInstall correctly but are using it to install packages to a
+non-standard location that hasn't been properly prepared.  Again, see the
+section on `Custom Installation Locations`_ for more details.
+
+
+Windows Notes
+~~~~~~~~~~~~~
+
+On Windows, an ``easy_install.exe`` launcher will also be installed, so that
+you can just type ``easy_install`` as long as it's on your ``PATH``.  If typing
+``easy_install`` at the command prompt doesn't work, check to make sure your
+``PATH`` includes the appropriate ``C:\\Python2X\\Scripts`` directory.  On
+most current versions of Windows, you can change the ``PATH`` by right-clicking
+"My Computer", choosing "Properties" and selecting the "Advanced" tab, then
+clicking the "Environment Variables" button.  ``PATH`` will be in the "System
+Variables" section, and you will need to exit and restart your command shell
+(command.com, cmd.exe, bash, or other) for the change to take effect.  Be sure
+to add a ``;`` after the last item on ``PATH`` before adding the scripts
+directory to it.
+
+Note that instead of changing your ``PATH`` to include the Python scripts
+directory, you can also retarget the installation location for scripts so they
+go on a directory that's already on the ``PATH``.  For more information see the
+sections below on `Command-Line Options`_ and `Configuration Files`_.  You
+can pass command line options (such as ``--script-dir``) to
+``distribute_setup.py`` to control where ``easy_install.exe`` will be installed.
+
+
+
+Downloading and Installing a Package
+------------------------------------
+
+For basic use of ``easy_install``, you need only supply the filename or URL of
+a source distribution or .egg file (`Python Egg`__).
+
+__ http://peak.telecommunity.com/DevCenter/PythonEggs
+
+**Example 1**. Install a package by name, searching PyPI for the latest
+version, and automatically downloading, building, and installing it::
+
+    easy_install SQLObject
+
+**Example 2**. Install or upgrade a package by name and version by finding
+links on a given "download page"::
+
+    easy_install -f http://pythonpaste.org/package_index.html SQLObject
+
+**Example 3**. Download a source distribution from a specified URL,
+automatically building and installing it::
+
+    easy_install http://example.com/path/to/MyPackage-1.2.3.tgz
+
+**Example 4**. Install an already-downloaded .egg file::
+
+    easy_install /my_downloads/OtherPackage-3.2.1-py2.3.egg
+
+**Example 5**.  Upgrade an already-installed package to the latest version
+listed on PyPI::
+
+    easy_install --upgrade PyProtocols
+
+**Example 6**.  Install a source distribution that's already downloaded and
+extracted in the current directory (New in 0.5a9)::
+
+    easy_install .
+
+**Example 7**.  (New in 0.6a1) Find a source distribution or Subversion
+checkout URL for a package, and extract it or check it out to
+``~/projects/sqlobject`` (the name will always be in all-lowercase), where it
+can be examined or edited.  (The package will not be installed, but it can
+easily be installed with ``easy_install ~/projects/sqlobject``.  See `Editing
+and Viewing Source Packages`_ below for more info.)::
+
+    easy_install --editable --build-directory ~/projects SQLObject
+
+**Example 8**. (New in 0.6.11) Install a distribution within your home directory::
+
+    easy_install --user SQLAlchemy
+
+Easy Install accepts URLs, filenames, PyPI package names (i.e., ``distutils``
+"distribution" names), and package+version specifiers.  In each case, it will
+attempt to locate the latest available version that meets your criteria.
+
+When downloading or processing downloaded files, Easy Install recognizes
+distutils source distribution files with extensions of .tgz, .tar, .tar.gz,
+.tar.bz2, or .zip.  And of course it handles already-built .egg
+distributions as well as ``.win32.exe`` installers built using distutils.
+
+By default, packages are installed to the running Python installation's
+``site-packages`` directory, unless you provide the ``-d`` or ``--install-dir``
+option to specify an alternative directory, or specify an alternate location
+using distutils configuration files.  (See `Configuration Files`_, below.)
+
+By default, any scripts included with the package are installed to the running
+Python installation's standard script installation location.  However, if you
+specify an installation directory via the command line or a config file, then
+the default directory for installing scripts will be the same as the package
+installation directory, to ensure that the script will have access to the
+installed package.  You can override this using the ``-s`` or ``--script-dir``
+option.
+
+Installed packages are added to an ``easy-install.pth`` file in the install
+directory, so that Python will always use the most-recently-installed version
+of the package.  If you would like to be able to select which version to use at
+runtime, you should use the ``-m`` or ``--multi-version`` option.
+
+
+Upgrading a Package
+-------------------
+
+You don't need to do anything special to upgrade a package: just install the
+new version, either by requesting a specific version, e.g.::
+
+    easy_install "SomePackage==2.0"
+
+a version greater than the one you have now::
+
+    easy_install "SomePackage>2.0"
+
+using the upgrade flag, to find the latest available version on PyPI::
+
+    easy_install --upgrade SomePackage
+
+or by using a download page, direct download URL, or package filename::
+
+    easy_install -f http://example.com/downloads ExamplePackage
+
+    easy_install http://example.com/downloads/ExamplePackage-2.0-py2.4.egg
+
+    easy_install my_downloads/ExamplePackage-2.0.tgz
+
+If you're using ``-m`` or ``--multi-version``, using the ``require()``
+function at runtime automatically selects the newest installed version of a
+package that meets your version criteria.  So, installing a newer version is
+the only step needed to upgrade such packages.
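+
+For example, a program that needs a particular range of versions of a
+hypothetical ``SomePackage`` installed in multi-version mode might activate it
+at startup like this (a minimal sketch)::
+
+    from pkg_resources import require
+
+    # Adds the newest installed SomePackage that satisfies the criteria to
+    # sys.path; raises an error if no suitable version is installed.
+    require("SomePackage>=2.0,<3.0")
+
+    import somepackage   # hypothetical module provided by SomePackage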
+
+If you're installing to a directory on PYTHONPATH, or a configured "site"
+directory (and not using ``-m``), installing a package automatically replaces
+any previous version in the ``easy-install.pth`` file, so that Python will
+import the most-recently installed version by default.  So, again, installing
+the newer version is the only upgrade step needed.
+
+If you haven't suppressed script installation (using ``--exclude-scripts`` or
+``-x``), then the upgraded version's scripts will be installed, and they will
+be automatically patched to ``require()`` the corresponding version of the
+package, so that you can use them even if they are installed in multi-version
+mode.
+
+``easy_install`` never actually deletes packages (unless you're installing a
+package with the same name and version number as an existing package), so if
+you want to get rid of older versions of a package, please see `Uninstalling
+Packages`_, below.
+
+
+Changing the Active Version
+---------------------------
+
+If you've upgraded a package, but need to revert to a previously-installed
+version, you can do so like this::
+
+    easy_install PackageName==1.2.3
+
+Where ``1.2.3`` is replaced by the exact version number you wish to switch to.
+If a package matching the requested name and version is not already installed
+in a directory on ``sys.path``, it will be located via PyPI and installed.
+
+If you'd like to switch to the latest installed version of ``PackageName``, you
+can do so like this::
+
+    easy_install PackageName
+
+This will activate the latest installed version.  (Note: if you have set any
+``find_links`` via distutils configuration files, those download pages will be
+checked for the latest available version of the package, and it will be
+downloaded and installed if it is newer than your current version.)
+
+Note that changing the active version of a package will install the newly
+active version's scripts, unless the ``--exclude-scripts`` or ``-x`` option is
+specified.
+
+
+Uninstalling Packages
+---------------------
+
+If you have replaced a package with another version, then you can just delete
+the package(s) you don't need by deleting the PackageName-versioninfo.egg file
+or directory (found in the installation directory).
+
+If you want to delete the currently installed version of a package (or all
+versions of a package), you should first run::
+
+    easy_install -m PackageName
+
+This will ensure that Python doesn't continue to search for a package you're
+planning to remove. After you've done this, you can safely delete the .egg
+files or directories, along with any scripts you wish to remove.
+
+
+Managing Scripts
+----------------
+
+Whenever you install, upgrade, or change versions of a package, EasyInstall
+automatically installs the scripts for the selected package version, unless
+you tell it not to with ``-x`` or ``--exclude-scripts``.  If any scripts in
+the script directory have the same name, they are overwritten.
+
+Thus, you do not normally need to manually delete scripts for older versions of
+a package, unless the newer version of the package does not include a script
+of the same name.  However, if you are completely uninstalling a package, you
+may wish to manually delete its scripts.
+
+EasyInstall's default behavior means that you can normally only run scripts
+from one version of a package at a time.  If you want to keep multiple versions
+of a script available, however, you can simply use the ``--multi-version`` or
+``-m`` option, and rename the scripts that EasyInstall creates.  This works
+because EasyInstall installs scripts as short code stubs that ``require()`` the
+matching version of the package the script came from, so renaming the script
+has no effect on what it executes.
+
+For example, suppose you want to use two versions of the ``rst2html`` tool
+provided by the `docutils <http://docutils.sf.net/>`_ package.  You might
+first install one version::
+
+    easy_install -m docutils==0.3.9
+
+then rename ``rst2html.py`` to ``r2h_039``, and install another version::
+
+    easy_install -m docutils==0.3.10
+
+This will create another ``rst2html.py`` script, this one using docutils
+version 0.3.10 instead of 0.3.9.  You now have two scripts, each using a
+different version of the package.  (Notice that we used ``-m`` for both
+installations, so that Python won't lock us out of using anything but the most
+recently-installed version of the package.)
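+
+To make the mechanism concrete, the renamed ``r2h_039`` stub does little more
+than pin a version and hand control to the real script; conceptually it looks
+roughly like this (a simplified sketch, not the exact code EasyInstall
+generates)::
+
+    #!/usr/bin/env python
+    # Conceptual wrapper: activate the pinned docutils egg, then run the
+    # original rst2html.py script that it contains.
+    __requires__ = 'docutils==0.3.9'
+    import pkg_resources
+    pkg_resources.run_script('docutils==0.3.9', 'rst2html.py')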
+
+
+
+Tips & Techniques
+-----------------
+
+
+Multiple Python Versions
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+As of version 0.6a11, EasyInstall installs itself under two names:
+``easy_install`` and ``easy_install-N.N``, where ``N.N`` is the Python version
+used to install it.  Thus, if you install EasyInstall for both Python 2.3 and
+2.4, you can use the ``easy_install-2.3`` or ``easy_install-2.4`` scripts to
+install packages for Python 2.3 or 2.4, respectively.
+
+Also, if you're working with Python version 2.4 or higher, you can run Python
+with ``-m easy_install`` to run that particular Python version's
+``easy_install`` command.
+
+
+Restricting Downloads with ``--allow-hosts``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can use the ``--allow-hosts`` (``-H``) option to restrict what domains
+EasyInstall will look for links and downloads on.  ``--allow-hosts=None``
+prevents downloading altogether.  You can also use wildcards, for example
+to restrict downloading to hosts in your own intranet.  See the section below
+on `Command-Line Options`_ for more details on the ``--allow-hosts`` option.
+
+By default, there are no host restrictions in effect, but you can change this
+default by editing the appropriate `configuration files`_ and adding:
+
+.. code-block:: ini
+
+    [easy_install]
+    allow_hosts = *.myintranet.example.com,*.python.org
+
+The above example would then allow downloads only from hosts in the
+``python.org`` and ``myintranet.example.com`` domains, unless overridden on the
+command line.
+
+
+Installing on Un-networked Machines
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Just copy the eggs or source packages you need to a directory on the target
+machine, then use the ``-f`` or ``--find-links`` option to specify that
+directory's location.  For example::
+
+    easy_install -H None -f somedir SomePackage
+
+will attempt to install SomePackage using only eggs and source packages found
+in ``somedir`` and disallowing all remote access.  You should of course make
+sure you have all of SomePackage's dependencies available in somedir.
+
+If you have another machine of the same operating system and library versions
+(or if the packages aren't platform-specific), you can create the directory of
+eggs using a command like this::
+
+    easy_install -zmaxd somedir SomePackage
+
+This will tell EasyInstall to put zipped eggs or source packages for
+SomePackage and all its dependencies into ``somedir``, without creating any
+scripts or .pth files.  You can then copy the contents of ``somedir`` to the
+target machine.  (``-z`` means zipped eggs, ``-m`` means multi-version, which
+prevents .pth files from being used, ``-a`` means to copy all the eggs needed,
+even if they're installed elsewhere on the machine, and ``-d`` indicates the
+directory to place the eggs in.)
+
+You can also build the eggs from local development packages that were installed
+with the ``setup.py develop`` command, by including the ``-l`` option, e.g.::
+
+    easy_install -zmaxld somedir SomePackage
+
+This will use locally-available source distributions to build the eggs.
+
+
+Packaging Others' Projects As Eggs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Need to distribute a package that isn't published in egg form?  You can use
+EasyInstall to build eggs for a project.  You'll want to use the ``--zip-ok``,
+``--exclude-scripts``, and possibly ``--no-deps`` options (``-z``, ``-x`` and
+``-N``, respectively).  Use ``-d`` or ``--install-dir`` to specify the location
+where you'd like the eggs placed.  By placing them in a directory that is
+published to the web, you can then make the eggs available for download, either
+in an intranet or to the internet at large.
+
+If someone distributes a package in the form of a single ``.py`` file, you can
+wrap it in an egg by tacking an ``#egg=name-version`` suffix on the file's URL.
+So, something like this::
+
+    easy_install -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
+
+will install the package as an egg, and this::
+
+    easy_install -zmaxd. \
+        -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo
+
+will create a ``.egg`` file in the current directory.
+
+
+Creating your own Package Index
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In addition to local directories and the Python Package Index, EasyInstall can
+find download links on most any web page whose URL is given to the ``-f``
+(``--find-links``) option.  In the simplest case, you can simply have a web
+page with links to eggs or Python source packages, even an automatically
+generated directory listing (such as the Apache web server provides).
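+
+For example, a tiny script along these lines (a hypothetical helper; an
+auto-generated Apache listing works just as well) can build such a page from a
+directory of distributions::
+
+    import os
+
+    # Write a minimal find-links page listing every distribution in "somedir".
+    entries = ['<a href="%s">%s</a><br/>' % (name, name)
+               for name in sorted(os.listdir('somedir'))
+               if name.endswith(('.egg', '.tar.gz', '.zip'))]
+    open('somedir/index.html', 'w').write(
+        '<html><body>\n%s\n</body></html>\n' % '\n'.join(entries))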
+
+If you are setting up an intranet site for package downloads, you may want to
+configure the target machines to use your download site by default, adding
+something like this to their `configuration files`_:
+
+.. code-block:: ini
+
+    [easy_install]
+    find_links = http://mypackages.example.com/somedir/
+                 http://turbogears.org/download/
+                 http://peak.telecommunity.com/dist/
+
+As you can see, you can list multiple URLs separated by whitespace, continuing
+on multiple lines if necessary (as long as the subsequent lines are indented).
+
+If you are more ambitious, you can also create an entirely custom package index
+or PyPI mirror.  See the ``--index-url`` option under `Command-Line Options`_,
+below, and also the section on `Package Index "API"`_.
+
+
+Password-Protected Sites
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+If a site you want to download from is password-protected using HTTP "Basic"
+authentication, you can specify your credentials in the URL, like so::
+
+    http://some_userid:some_password@some.example.com/some_path/
+
+You can do this with both index page URLs and direct download URLs.  As long
+as any HTML pages read by easy_install use *relative* links to point to the
+downloads, the same user ID and password will be used to do the downloading.
+
+
+Controlling Build Options
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+EasyInstall respects standard distutils `Configuration Files`_, so you can use
+them to configure build options for packages that it installs from source.  For
+example, if you are on Windows using the MinGW compiler, you can configure the
+default compiler by putting something like this:
+
+.. code-block:: ini
+
+    [build]
+    compiler = mingw32
+
+into the appropriate distutils configuration file.  In fact, since this is just
+normal distutils configuration, it will affect any builds using that config
+file, not just ones done by EasyInstall.  For example, if you add those lines
+to ``distutils.cfg`` in the ``distutils`` package directory, it will be the
+default compiler for *all* packages you build.  See `Configuration Files`_
+below for a list of the standard configuration file locations, and links to
+more documentation on using distutils configuration files.
+
+
+Editing and Viewing Source Packages
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Sometimes a package's source distribution contains additional documentation,
+examples, configuration files, etc., that are not part of its actual code.  If
+you want to be able to examine these files, you can use the ``--editable``
+option to EasyInstall, and EasyInstall will look for a source distribution
+or Subversion URL for the package, then download and extract it or check it out
+as a subdirectory of the ``--build-directory`` you specify.  If you then wish
+to install the package after editing or configuring it, you can do so by
+rerunning EasyInstall with that directory as the target.
+
+Note that using ``--editable`` stops EasyInstall from actually building or
+installing the package; it just finds, obtains, and possibly unpacks it for
+you.  This allows you to make changes to the package if necessary, and to
+either install it in development mode using ``setup.py develop`` (if the
+package uses setuptools, that is), or install it by running ``easy_install
+projectdir`` (where ``projectdir`` is the subdirectory EasyInstall created for
+the downloaded package).
+
+In order to use ``--editable`` (``-e`` for short), you *must* also supply a
+``--build-directory`` (``-b`` for short).  The project will be placed in a
+subdirectory of the build directory.  The subdirectory will have the same
+name as the project itself, but in all-lowercase.  If a file or directory of
+that name already exists, EasyInstall will print an error message and exit.
+
+Also, when using ``--editable``, you cannot use URLs or filenames as arguments.
+You *must* specify project names (and optional version requirements) so that
+EasyInstall knows what directory name(s) to create.  If you need to force
+EasyInstall to use a particular URL or filename, you should specify it as a
+``--find-links`` item (``-f`` for short), and then also specify
+the project name, e.g.::
+
+    easy_install -eb ~/projects \
+     -fhttp://prdownloads.sourceforge.net/ctypes/ctypes-0.9.6.tar.gz?download \
+     ctypes==0.9.6
+
+
+Dealing with Installation Conflicts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+(NOTE: As of 0.6a11, this section is obsolete; it is retained here only so that
+people using older versions of EasyInstall can consult it.  As of version
+0.6a11, installation conflicts are handled automatically without deleting the
+old or system-installed packages, and without ignoring the issue.  Instead,
+eggs are automatically shifted to the front of ``sys.path`` using special
+code added to the ``easy-install.pth`` file.  So, if you are using version
+0.6a11 or better of setuptools, you do not need to worry about conflicts,
+and the following issues do not apply to you.)
+
+EasyInstall installs distributions in a "managed" way, such that each
+distribution can be independently activated or deactivated on ``sys.path``.
+However, packages that were not installed by EasyInstall are "unmanaged",
+in that they usually live all in one directory and cannot be independently
+activated or deactivated.
+
+As a result, if you are using EasyInstall to upgrade an existing package, or
+to install a package with the same name as an existing package, EasyInstall
+will warn you of the conflict.  (This is an improvement over ``setup.py
+install``, because the ``distutils`` just install new packages on top of old
+ones, possibly combining two unrelated packages or leaving behind modules that
+have been deleted in the newer version of the package.)
+
+By default, EasyInstall will stop the installation if it detects a conflict
+between an existing, "unmanaged" package, and a module or package in any of
+the distributions you're installing.  It will display a list of all of the
+existing files and directories that would need to be deleted for the new
+package to be able to function correctly.  You can then either delete these
+conflicting files and directories yourself and re-run EasyInstall, or you can
+just use the ``--delete-conflicting`` or ``--ignore-conflicts-at-my-risk``
+options, as described under `Command-Line Options`_, below.
+
+Of course, once you've replaced all of your existing "unmanaged" packages with
+versions managed by EasyInstall, you won't have any more conflicts to worry
+about!
+
+
+Compressed Installation
+~~~~~~~~~~~~~~~~~~~~~~~
+
+EasyInstall tries to install packages in zipped form, if it can.  Zipping
+packages can improve Python's overall import performance if you're not using
+the ``--multi-version`` option, because Python processes zipfile entries on
+``sys.path`` much faster than it does directories.
+
+As of version 0.5a9, EasyInstall analyzes packages to determine whether they
+can be safely installed as a zipfile, and then acts on its analysis.  (Previous
+versions would not install a package as a zipfile unless you used the
+``--zip-ok`` option.)
+
+The current analysis approach is fairly conservative; it currently looks for:
+
+ * Any use of the ``__file__`` or ``__path__`` variables (which should be
+   replaced with ``pkg_resources`` API calls; see the sketch after this list)
+
+ * Possible use of ``inspect`` functions that expect to manipulate source files
+   (e.g. ``inspect.getsource()``)
+
+ * Top-level modules that might be scripts used with ``python -m`` (Python 2.4)
+
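+For instance, the first item above is why code that builds data-file paths from
+``__file__`` defeats zipped installation; the ``pkg_resources`` API offers a
+zip-safe alternative.  A minimal sketch of such a rewrite, using a hypothetical
+``examplepkg`` package that ships a ``data.txt`` resource::
+
+    # Not zip-safe: assumes the package lives in a real directory.
+    # import os
+    # data = open(os.path.join(os.path.dirname(__file__), "data.txt")).read()
+
+    # Zip-safe equivalent: works whether the package is a directory or a zipped egg.
+    from pkg_resources import resource_string
+    data = resource_string("examplepkg", "data.txt")
+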
+If any of the above are found in the package being installed, EasyInstall will
+assume that the package cannot be safely run from a zipfile, and unzip it to
+a directory instead.  You can override this analysis with the ``--zip-ok`` flag,
+which will tell EasyInstall to install the package as a zipfile anyway.  Or,
+you can use the ``--always-unzip`` flag, in which case EasyInstall will always
+unzip, even if its analysis says the package is safe to run as a zipfile.
+
+Normally, however, it is simplest to let EasyInstall handle the determination
+of whether to zip or unzip, and only specify overrides when needed to work
+around a problem.  If you find you need to override EasyInstall's guesses, you
+may want to contact the package author and the EasyInstall maintainers, so that
+they can make appropriate changes in future versions.
+
+(Note: If a package uses ``setuptools`` in its setup script, the package author
+has the option to declare the package safe or unsafe for zipped usage via the
+``zip_safe`` argument to ``setup()``.  If the package author makes such a
+declaration, EasyInstall believes the package's author and does not perform its
+own analysis.  However, your command-line option, if any, will still override
+the package author's choice.)
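+
+For package authors, such a declaration is just a keyword argument in
+``setup.py``; a minimal sketch (with a hypothetical project name) looks like
+this::
+
+    from setuptools import setup
+
+    setup(
+        name="ExamplePackage",    # hypothetical project name
+        version="1.0",
+        packages=["examplepackage"],
+        zip_safe=False,           # ask EasyInstall to always unzip this package
+    )
+
+Passing ``zip_safe=True`` instead declares the package safe to run directly
+from a zipped egg.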
+
+
+Reference Manual
+================
+
+Configuration Files
+-------------------
+
+(New in 0.4a2)
+
+You may specify default options for EasyInstall using the standard
+distutils configuration files, under the command heading ``easy_install``.
+EasyInstall will look first for a ``setup.cfg`` file in the current directory,
+then a ``~/.pydistutils.cfg`` or ``$HOME\\pydistutils.cfg`` (on Unix-like OSes
+and Windows, respectively), and finally a ``distutils.cfg`` file in the
+``distutils`` package directory.  Here's a simple example:
+
+.. code-block:: ini
+
+    [easy_install]
+
+    # set the default location to install packages
+    install_dir = /home/me/lib/python
+
+    # Notice that indentation can be used to continue an option
+    # value; this is especially useful for the "--find-links"
+    # option, which tells easy_install to use download links on
+    # these pages before consulting PyPI:
+    #
+    find_links = http://sqlobject.org/
+                 http://peak.telecommunity.com/dist/
+
+In addition to accepting configuration for its own options under
+``[easy_install]``, EasyInstall also respects defaults specified for other
+distutils commands.  For example, if you don't set an ``install_dir`` for
+``[easy_install]``, but *have* set an ``install_lib`` for the ``[install]``
+command, this will become EasyInstall's default installation directory.  Thus,
+if you are already using distutils configuration files to set default install
+locations, build options, etc., EasyInstall will respect your existing settings
+until and unless you override them explicitly in an ``[easy_install]`` section.
+
+For more information, see also the current Python documentation on the `use and
+location of distutils configuration files <http://docs.python.org/inst/config-syntax.html>`_.
+
+Notice that ``easy_install`` will use the ``setup.cfg`` from the current
+working directory only if it was triggered from ``setup.py`` through the
+``install_requires`` option. The standalone command will not use that file.
+
+Command-Line Options
+--------------------
+
+``--zip-ok, -z``
+    Install all packages as zip files, even if they are marked as unsafe for
+    running as a zipfile.  This can be useful when EasyInstall's analysis
+    of a non-setuptools package is too conservative, but keep in mind that
+    the package may not work correctly.  (Changed in 0.5a9; previously this
+    option was required in order for zipped installation to happen at all.)
+
+``--always-unzip, -Z``
+    Don't install any packages as zip files, even if the packages are marked
+    as safe for running as a zipfile.  This can be useful if a package does
+    something unsafe, but not in a way that EasyInstall can easily detect.
+    EasyInstall's default analysis is currently very conservative, however, so
+    you should only use this option if you've had problems with a particular
+    package, and *after* reporting the problem to the package's maintainer and
+    to the EasyInstall maintainers.
+
+    (Note: the ``-z/-Z`` options only affect the installation of newly-built
+    or downloaded packages that are not already installed in the target
+    directory; if you want to convert an existing installed version from
+    zipped to unzipped or vice versa, you'll need to delete the existing
+    version first, and re-run EasyInstall.)
+
+``--multi-version, -m``
+    "Multi-version" mode. Specifying this option prevents ``easy_install`` from
+    adding an ``easy-install.pth`` entry for the package being installed, and
+    if an entry for any version of the package already exists, it will be removed
+    upon successful installation. In multi-version mode, no specific version of
+    the package is available for importing, unless you use
+    ``pkg_resources.require()`` to put it on ``sys.path``. This can be as
+    simple as::
+
+        from pkg_resources import require
+        require("SomePackage", "OtherPackage", "MyPackage")
+
+    which will put the latest installed version of the specified packages on
+    ``sys.path`` for you. (For more advanced uses, like selecting specific
+    versions and enabling optional dependencies, see the ``pkg_resources`` API
+    doc.)
+
+    Changed in 0.6a10: this option is no longer silently enabled when
+    installing to a non-PYTHONPATH, non-"site" directory.  You must always
+    explicitly use this option if you want it to be active.
+
+``--upgrade, -U``   (New in 0.5a4)
+    By default, EasyInstall only searches online if a project/version
+    requirement can't be met by distributions already installed
+    on sys.path or the installation directory.  However, if you supply the
+    ``--upgrade`` or ``-U`` flag, EasyInstall will always check the package
+    index and ``--find-links`` URLs before selecting a version to install.  In
+    this way, you can force EasyInstall to use the latest available version of
+    any package it installs (subject to any version requirements that might
+    exclude such later versions).
+
+``--install-dir=DIR, -d DIR``
+    Set the installation directory. It is up to you to ensure that this
+    directory is on ``sys.path`` at runtime, and to use
+    ``pkg_resources.require()`` to enable the installed package(s) that you
+    need.
+
+    (New in 0.4a2) If this option is not directly specified on the command line
+    or in a distutils configuration file, the distutils default installation
+    location is used.  Normally, this would be the ``site-packages`` directory,
+    but if you are using distutils configuration files, setting things like
+    ``prefix`` or ``install_lib``, then those settings are taken into
+    account when computing the default installation directory, as is the
+    ``--prefix`` option.
+
+``--script-dir=DIR, -s DIR``
+    Set the script installation directory.  If you don't supply this option
+    (via the command line or a configuration file), but you *have* supplied
+    an ``--install-dir`` (via command line or config file), then this option
+    defaults to the same directory, so that the scripts will be able to find
+    their associated package installation.  Otherwise, this setting defaults
+    to the location where the distutils would normally install scripts, taking
+    any distutils configuration file settings into account.
+
+``--exclude-scripts, -x``
+    Don't install scripts.  This is useful if you need to install multiple
+    versions of a package, but do not want to reset the version that will be
+    run by scripts that are already installed.
+
+``--user`` (New in 0.6.11)
+    Use the user site-packages as specified in :pep:`370`
+    instead of the global site-packages.
+
+``--always-copy, -a``   (New in 0.5a4)
+    Copy all needed distributions to the installation directory, even if they
+    are already present in a directory on sys.path.  In older versions of
+    EasyInstall, this was the default behavior, but now you must explicitly
+    request it.  By default, EasyInstall will no longer copy such distributions
+    from other sys.path directories to the installation directory, unless you
+    explicitly gave the distribution's filename on the command line.
+
+    Note that as of 0.6a10, using this option excludes "system" and
+    "development" eggs from consideration because they can't be reliably
+    copied.  This may cause EasyInstall to choose an older version of a package
+    than what you expected, or it may cause downloading and installation of a
+    fresh copy of something that's already installed.  You will see warning
+    messages for any eggs that EasyInstall skips, before it falls back to an
+    older version or attempts to download a fresh copy.
+
+``--find-links=URLS_OR_FILENAMES, -f URLS_OR_FILENAMES``
+    Scan the specified "download pages" or directories for direct links to eggs
+    or other distributions.  Any existing file or directory names or direct
+    download URLs are immediately added to EasyInstall's search cache, and any
+    indirect URLs (ones that don't point to eggs or other recognized archive
+    formats) are added to a list of additional places to search for download
+    links.  As soon as EasyInstall has to go online to find a package (either
+    because it doesn't exist locally, or because ``--upgrade`` or ``-U`` was
+    used), the specified URLs will be downloaded and scanned for additional
+    direct links.
+
+    Eggs and archives found by way of ``--find-links`` are only downloaded if
+    they are needed to meet a requirement specified on the command line; links
+    to unneeded packages are ignored.
+
+    If all requested packages can be found using links on the specified
+    download pages, the Python Package Index will not be consulted unless you
+    also specified the ``--upgrade`` or ``-U`` option.
+
+    (Note: if you want to refer to a local HTML file containing links, you must
+    use a ``file:`` URL, as filenames that do not refer to a directory, egg, or
+    archive are ignored.)
+
+    You may specify multiple URLs or file/directory names with this option,
+    separated by whitespace.  Note that on the command line, you will probably
+    have to surround the URL list with quotes, so that it is recognized as a
+    single option value.  You can also specify URLs in a configuration file;
+    see `Configuration Files`_, above.
+
+    Changed in 0.6a10: previously all URLs and directories passed to this
+    option were scanned as early as possible, but from 0.6a10 on, only
+    directories and direct archive links are scanned immediately; URLs are not
+    retrieved unless a package search was already going to go online due to a
+    package not being available locally, or due to the use of the ``--upgrade``
+    or ``-U`` option.
+
+``--no-find-links`` (New in Distribute 0.6.11)
+    Block the addition of any links.  This is useful if you want to avoid
+    adding links defined in a project easy_install is installing (whether it's
+    a requested project or a dependency).  When used, ``--find-links`` is
+    ignored.
+
+``--delete-conflicting, -D`` (Removed in 0.6a11)
+    (As of 0.6a11, this option is no longer necessary; please do not use it!)
+
+    If you are replacing a package that was previously installed *without*
+    using EasyInstall, the old version may end up on ``sys.path`` before the
+    version being installed with EasyInstall.  EasyInstall will normally abort
+    the installation of a package if it detects such a conflict, and ask you to
+    manually remove the conflicting files or directories.  If you specify this
+    option, however, EasyInstall will attempt to delete the files or
+    directories itself, and then proceed with the installation.
+
+``--ignore-conflicts-at-my-risk`` (Removed in 0.6a11)
+    (As of 0.6a11, this option is no longer necessary; please do not use it!)
+
+    Ignore conflicting packages and proceed with installation anyway, even
+    though it means the package probably won't work properly.  If the
+    conflicting package is in a directory you can't write to, this may be your
+    only option, but you will need to take more invasive measures to get the
+    installed package to work, like manually adding it to ``PYTHONPATH`` or to
+    ``sys.path`` at runtime.
+
+``--index-url=URL, -i URL`` (New in 0.4a1; default changed in 0.6c7)
+    Specifies the base URL of the Python Package Index.  The default is
+    http://pypi.python.org/simple if not specified.  When a package is requested
+    that is not locally available or linked from a ``--find-links`` download
+    page, the package index will be searched for download pages for the needed
+    package, and those download pages will be searched for links to download
+    an egg or source distribution.
+
+``--editable, -e`` (New in 0.6a1)
+    Only find and download source distributions for the specified projects,
+    unpacking them to subdirectories of the specified ``--build-directory``.
+    EasyInstall will not actually build or install the requested projects or
+    their dependencies; it will just find and extract them for you.  See
+    `Editing and Viewing Source Packages`_ above for more details.
+
+``--build-directory=DIR, -b DIR`` (UPDATED in 0.6a1)
+    Set the directory used to build source packages.  If a package is built
+    from a source distribution or checkout, it will be extracted to a
+    subdirectory of the specified directory.  The subdirectory will have the
+    same name as the extracted distribution's project, but in all-lowercase.
+    If a file or directory of that name already exists in the given directory,
+    a warning will be printed to the console, and the build will take place in
+    a temporary directory instead.
+
+    This option is most useful in combination with the ``--editable`` option,
+    which forces EasyInstall to *only* find and extract (but not build and
+    install) source distributions.  See `Editing and Viewing Source Packages`_,
+    above, for more information.
+
+``--verbose, -v, --quiet, -q`` (New in 0.4a4)
+    Control the level of detail of EasyInstall's progress messages.  The
+    default detail level is "info", which prints information only about
+    relatively time-consuming operations like running a setup script, unpacking
+    an archive, or retrieving a URL.  Using ``-q`` or ``--quiet`` drops the
+    detail level to "warn", which will only display installation reports,
+    warnings, and errors.  Using ``-v`` or ``--verbose`` increases the detail
+    level to include individual file-level operations, link analysis messages,
+    and distutils messages from any setup scripts that get run.  If you include
+    the ``-v`` option more than once, the second and subsequent uses are passed
+    down to any setup scripts, increasing the verbosity of their reporting as
+    well.
+
+``--dry-run, -n`` (New in 0.4a4)
+    Don't actually install the package or scripts.  This option is passed down
+    to any setup scripts run, so packages should not actually build either.
+    This does *not* skip downloading, nor does it skip extracting source
+    distributions to a temporary/build directory.
+
+``--optimize=LEVEL``, ``-O LEVEL`` (New in 0.4a4)
+    If you are installing from a source distribution, and are *not* using the
+    ``--zip-ok`` option, this option controls the optimization level for
+    compiling installed ``.py`` files to ``.pyo`` files.  It does not affect
+    the compilation of modules contained in ``.egg`` files, only those in
+    ``.egg`` directories.  The optimization level can be set to 0, 1, or 2;
+    the default is 0 (unless it's set under ``install`` or ``install_lib`` in
+    one of your distutils configuration files).
+
+``--record=FILENAME``  (New in 0.5a4)
+    Write a record of all installed files to FILENAME.  This is basically the
+    same as the same option for the standard distutils "install" command, and
+    is included for compatibility with tools that expect to pass this option
+    to "setup.py install".
+
+``--site-dirs=DIRLIST, -S DIRLIST``   (New in 0.6a1)
+    Specify one or more custom "site" directories (separated by commas).
+    "Site" directories are directories where ``.pth`` files are processed, such
+    as the main Python ``site-packages`` directory.  As of 0.6a10, EasyInstall
+    automatically detects whether a given directory processes ``.pth`` files
+    (or can be made to do so), so you should not normally need to use this
+    option.  It is now only necessary if you want to override EasyInstall's
+    judgment and force an installation directory to be treated as if it
+    supported ``.pth`` files.
+
+``--no-deps, -N``  (New in 0.6a6)
+    Don't install any dependencies.  This is intended as a convenience for
+    tools that wrap eggs in a platform-specific packaging system.  (We don't
+    recommend that you use it for anything else.)
+
+``--allow-hosts=PATTERNS, -H PATTERNS``   (New in 0.6a6)
+    Restrict downloading and spidering to hosts matching the specified glob
+    patterns.  E.g. ``-H *.python.org`` restricts web access so that only
+    packages listed on, and downloadable from, machines in the ``python.org``
+    domain can be installed.  The glob patterns must match the *entire*
+    user/host/port section of the target URL(s).  For example, ``*.python.org``
+    will NOT accept a URL like ``http://python.org/foo`` or
+    ``http://www.python.org:8080/``.  Multiple patterns can be specified by
+    separating them with commas.  The default pattern is ``*``, which matches
+    anything.
+
+    In general, this option is mainly useful for blocking EasyInstall's web
+    access altogether (e.g. ``-Hlocalhost``), or to restrict it to an intranet
+    or other trusted site.  EasyInstall will do the best it can to satisfy
+    dependencies given your host restrictions, but of course can fail if it
+    can't find suitable packages.  EasyInstall displays all blocked URLs, so
+    that you can adjust your ``--allow-hosts`` setting if it is more strict
+    than you intended.  Some sites may wish to define a restrictive default
+    setting for this option in their `configuration files`_, and then manually
+    override the setting on the command line as needed.
+
+``--prefix=DIR`` (New in 0.6a10)
+    Use the specified directory as a base for computing the default
+    installation and script directories.  On Windows, the resulting default
+    directories will be ``prefix\\Lib\\site-packages`` and ``prefix\\Scripts``,
+    while on other platforms the defaults will be
+    ``prefix/lib/python2.X/site-packages`` (with the appropriate version
+    substituted) for libraries and ``prefix/bin`` for scripts.
+
+    Note that the ``--prefix`` option only sets the *default* installation and
+    script directories, and does not override the ones set on the command line
+    or in a configuration file.
+
+``--local-snapshots-ok, -l`` (New in 0.6c6)
+    Normally, EasyInstall prefers to only install *released* versions of
+    projects, not in-development ones, because such projects may not
+    have a currently-valid version number.  So, it usually only installs them
+    when their ``setup.py`` directory is explicitly passed on the command line.
+
+    However, if this option is used, then any in-development projects that were
+    installed using the ``setup.py develop`` command will be used to build
+    eggs, effectively upgrading the "in-development" project to a snapshot
+    release.  Normally, this option is used only in conjunction with the
+    ``--always-copy`` option to create a distributable snapshot of every egg
+    needed to run an application.
+
+    Note that if you use this option, you must make sure that there is a valid
+    version number (such as an SVN revision number tag) for any in-development
+    projects that may be used, as otherwise EasyInstall may not be able to tell
+    what version of the project is "newer" when future installations or
+    upgrades are attempted.
+
+
+.. _non-root installation:
+
+Custom Installation Locations
+-----------------------------
+
+By default, EasyInstall installs Python packages into Python's main ``site-packages`` directory,
+and manages them using a custom ``.pth`` file in that same directory.
+
+Very often though, a user or developer wants ``easy_install`` to install and manage Python packages
+in an alternative location, usually for one of three reasons:
+
+1. They don't have access to write to the main Python site-packages directory.
+
+2. They want a user-specific stash of packages that is not visible to other users.
+
+3. They want to isolate a set of packages to a specific Python application, usually to minimize
+   the possibility of version conflicts.
+
+Historically, there have been many approaches to achieve custom installation.
+The following section lists only the easiest and most relevant approaches [1]_.
+
+`Use the "--user" option`_
+
+`Use the "--user" option and customize "PYTHONUSERBASE"`_
+
+`Use "virtualenv"`_
+
+.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by `PEP-370`_ in Python 2.6.
+
+.. _PEP-370: http://www.python.org/dev/peps/pep-0370/
+
+
+Use the "--user" option
+~~~~~~~~~~~~~~~~~~~~~~~
+With Python 2.6 came the User scheme for installation, which means that all
+Python distributions support an alternative install location that is specific to a user [2]_ [3]_.
+The default location for each OS is explained in the Python documentation
+for the ``site.USER_BASE`` variable.  This mode of installation can be turned on by
+specifying the ``--user`` option to ``setup.py install`` or ``easy_install``.
+This approach serves the need to have a user-specific stash of packages.
+
+.. [2] Prior to Python 2.6, Mac OS X offered a form of the User scheme. That is now subsumed into the User scheme introduced in Python 2.6.
+.. [3] Prior to the User scheme, there was the Home scheme, which is still available, but requires more effort than the User scheme to get packages recognized.
+
+Use the "--user" option and customize "PYTHONUSERBASE"
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The User scheme install location can be customized by setting the ``PYTHONUSERBASE`` environment
+variable, which updates the value of ``site.USER_BASE``.  To isolate packages to a specific
+application, simply set ``PYTHONUSERBASE`` in that application's environment to a location
+that contains just those packages.
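+
+For example, you can check where a given ``PYTHONUSERBASE`` will place packages
+with a couple of lines of Python (a minimal sketch; the exact paths are
+platform-dependent, and the variable must be set before Python starts)::
+
+    import site
+
+    print(site.USER_BASE)   # the per-user base directory (reflects PYTHONUSERBASE)
+    print(site.USER_SITE)   # the corresponding per-user site-packages directory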
+
+Use "virtualenv"
+~~~~~~~~~~~~~~~~
+"virtualenv" is a 3rd-party python package that effectively "clones" a python installation, thereby
+creating an isolated location to intall packages.  The evolution of "virtualenv" started before the existence
+of the User installation scheme.  "virtualenv" provides a version of ``easy_install`` that is
+scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features
+that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages.
+
+Please refer to the `virtualenv`_ documentation for more details.
+
+.. _virtualenv: http://pypi.python.org/pypi/virtualenv
+
+
+
+Package Index "API"
+-------------------
+
+Custom package indexes (and PyPI) must follow these rules for
+EasyInstall to be able to look up and download packages:
+
+1. Except where stated otherwise, "pages" are HTML or XHTML, and "links"
+   refer to ``href`` attributes.
+
+2. Individual project version pages' URLs must be of the form
+   ``base/projectname/version``, where ``base`` is the package index's base URL.
+
+3. Omitting the ``/version`` part of a project page's URL (but keeping the
+   trailing ``/``) should result in a page that is either:
+
+   a) The single active version of that project, as though the version had been
+      explicitly included, OR
+
+   b) A page with links to all of the active version pages for that project.
+
+4. Individual project version pages should contain direct links to downloadable
+   distributions where possible.  It is explicitly permitted for a project's
+   "long_description" to include URLs, and these should be formatted as HTML
+   links by the package index, as EasyInstall does no special processing to
+   identify what parts of a page are index-specific and which are part of the
+   project's supplied description.
+
+5. Where available, MD5 information should be added to download URLs by
+   appending a fragment identifier of the form ``#md5=...``, where ``...`` is
+   the 32-character hex MD5 digest (one way to compute such a fragment is
+   sketched after this list).  EasyInstall will verify that the downloaded
+   file's MD5 digest matches the given value.
+
+6. Individual project version pages should identify any "homepage" or
+   "download" URLs using ``rel="homepage"`` and ``rel="download"`` attributes
+   on the HTML elements linking to those URLs. Use of these attributes will
+   cause EasyInstall to always follow the provided links, unless it can be
+   determined by inspection that they are downloadable distributions. If the
+   links are not to downloadable distributions, they are retrieved, and if they
+   are HTML, they are scanned for download links. They are *not* scanned for
+   additional "homepage" or "download" links, as these are only processed for
+   pages that are part of a package index site.
+
+7. The root URL of the index, if retrieved with a trailing ``/``, must result
+   in a page containing links to *all* projects' active version pages.
+
+   (Note: This requirement is a workaround for the absence of case-insensitive
+   ``safe_name()`` matching of project names in URL paths. If project names are
+   matched in this fashion (e.g. via the PyPI server, mod_rewrite, or a similar
+   mechanism), then it is not necessary to include this all-packages listing
+   page.)
+
+8. If a package index is accessed via a ``file://`` URL, then EasyInstall will
+   automatically use ``index.html`` files, if present, when trying to read a
+   directory with a trailing ``/`` on the URL.
+
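+One way an index generator might compute the ``#md5=`` fragment described in
+rule 5, using Python 2.5+'s ``hashlib`` (a minimal sketch with a hypothetical
+distribution filename)::
+
+    import hashlib
+
+    def md5_fragment(path):
+        """Return a '#md5=<hexdigest>' fragment for the file at `path`."""
+        digest = hashlib.md5(open(path, 'rb').read()).hexdigest()
+        return '#md5=' + digest
+
+    # Yields something like 'ExamplePackage-1.0.tar.gz#md5=<32 hex digits>'
+    print('ExamplePackage-1.0.tar.gz' + md5_fragment('ExamplePackage-1.0.tar.gz'))
+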
+
+Backward Compatibility
+~~~~~~~~~~~~~~~~~~~~~~
+
+Package indexes that wish to support setuptools versions prior to 0.6b4 should
+also follow these rules:
+
+* Homepage and download links must be preceded with ``"<th>Home Page"`` or
+  ``"<th>Download URL"``, in addition to (or instead of) the ``rel=""``
+  attributes on the actual links.  These marker strings do not need to be
+  visible, or uncommented, however!  For example, the following is a valid
+  homepage link that will work with any version of setuptools::
+
+    <li>
+     <strong>Home Page:</strong>
+     <!-- <th>Home Page -->
+     <a rel="homepage" href="http://sqlobject.org">http://sqlobject.org</a>
+    </li>
+
+  Even though the marker string is in an HTML comment, older versions of
+  EasyInstall will still "see" it and know that the link that follows is the
+  project's home page URL.
+
+* The pages described by paragraph 3(b) of the preceding section *must*
+  contain the string ``"Index of Packages</title>"`` somewhere in their text.
+  This can be inside of an HTML comment, if desired, and it can be anywhere
+  in the page.  (Note: this string MUST NOT appear on normal project pages, as
+  described in paragraphs 2 and 3(a)!)
+
+In addition, for compatibility with PyPI versions that do not use ``#md5=``
+fragment IDs, EasyInstall uses the following regular expression to match PyPI's
+displayed MD5 info (broken onto two lines for readability)::
+
+    <a href="([^"#]+)">([^<]+)</a>\n\s+\(<a href="[^?]+\?:action=show_md5
+    &amp;digest=([0-9a-f]{32})">md5</a>\)
+
+History
+=======
+
+0.6c9
+ * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
+   is flattened out in the resulting egg.  (There was a case-sensitivity
+   problem that affected some distributions, notably ``pywin32``.)
+
+ * Prevent ``--help-commands`` and other junk from showing under Python 2.5
+   when running ``easy_install --help``.
+
+ * Fixed GUI scripts sometimes not executing on Windows
+
+ * Fixed not picking up dependency links from recursive dependencies.
+
+ * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
+
+ * Changes for Jython compatibility
+
+ * Improved error message when a requirement is also a directory name, but the
+   specified directory is not a source package.
+
+ * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
+
+ * Fixed HTTP SVN detection failing when the page title included a project
+   name (e.g. on SourceForge-hosted SVN)
+
+ * Fix Jython script installation to handle ``#!`` lines better when
+   ``sys.executable`` is a script.
+
+ * Removed use of deprecated ``md5`` module if ``hashlib`` is available
+
+ * Keep site directories (e.g. ``site-packages``) from being included in
+   ``.pth`` files.
+
+0.6c7
+ * ``ftp:`` download URLs now work correctly.
+
+ * The default ``--index-url`` is now ``http://pypi.python.org/simple``, to use
+   the Python Package Index's new simpler (and faster!) REST API.
+
+0.6c6
+ * EasyInstall no longer aborts the installation process if a URL it wants to
+   retrieve can't be downloaded, unless the URL is an actual package download.
+   Instead, it issues a warning and tries to keep going.
+
+ * Fixed distutils-style scripts originally built on Windows having their line
+   endings doubled when installed on any platform.
+
+ * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
+   installed using ``setup.py develop``.
+
+ * Fixed not HTML-decoding URLs scraped from web pages
+
+0.6c5
+ * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
+   is installed unzipped.
+
+0.6c4
+ * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
+   URLs.  If a password-protected page contains links to the same host (and
+   protocol), those links will inherit the credentials used to access the
+   original page.
+
+ * Removed all special support for Sourceforge mirrors, as Sourceforge's
+   mirror system now works well for non-browser downloads.
+
+ * Fixed not recognizing ``win32.exe`` installers that included a custom
+   bitmap.
+
+ * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
+   are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
+   is done by ``os.urandom()`` on some platforms).
+
+ * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
+   has a space in it (e.g., the user installed Python to a ``Program Files``
+   directory).
+
+0.6c3
+ * You can once again use "python -m easy_install" with Python 2.4 and above.
+
+ * Python 2.5 compatibility fixes added.
+
+0.6c2
+ * Windows script wrappers now support quoted arguments and arguments
+   containing spaces.  (Patch contributed by Jim Fulton.)
+
+ * The ``ez_setup.py`` script now actually works when you put a setuptools
+   ``.egg`` alongside it for bootstrapping an offline machine.
+
+ * A writable installation directory on ``sys.path`` is no longer required to
+   download and extract a source distribution using ``--editable``.
+
+ * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable``
+   contains non-ASCII characters, to prevent deprecation warnings about an
+   unspecified encoding when the script is run.
+
+0.6c1
+ * EasyInstall now includes setuptools version information in the
+   ``User-Agent`` string sent to websites it visits.
+
+0.6b4
+ * Fix creating Python wrappers for non-Python scripts
+
+ * Fix ``ftp://`` directory listing URLs from causing a crash when used in the
+   "Home page" or "Download URL" slots on PyPI.
+
+ * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile
+   or directory is deleted/overwritten.
+
+ * Fix not recognizing HTML 404 pages from package indexes.
+
+ * Allow ``file://`` URLs to be used as a package index.  URLs that refer to
+   directories will use an internally-generated directory listing if there is
+   no ``index.html`` file in the directory.
+
+ * Allow external links in a package index to be specified using
+   ``rel="homepage"`` or ``rel="download"``, without needing the old
+   PyPI-specific visible markup.
+
+ * Suppressed warning message about possibly-misspelled project name, if an egg
+   or link for that project name has already been seen.
+
+0.6b3
+ * Fix local ``--find-links`` eggs not being copied except with
+   ``--always-copy``.
+
+ * Fix sometimes not detecting local packages installed outside of "site"
+   directories.
+
+ * Fix mysterious errors during initial ``setuptools`` install, caused by
+   ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru
+   after deleting the egg from which it's running.
+
+0.6b2
+ * Don't install or update a ``site.py`` patch when installing to a
+   ``PYTHONPATH`` directory with ``--multi-version``, unless an
+   ``easy-install.pth`` file is already in use there.
+
+ * Construct ``.pth`` file paths in such a way that installing an egg whose
+   name begins with ``import`` doesn't cause a syntax error.
+
+ * Fixed a bogus warning message that wasn't updated since the 0.5 versions.
+
+0.6b1
+ * Better ambiguity management: accept ``#egg`` name/version even if processing
+   what appears to be a correctly-named distutils file, and ignore ``.egg``
+   files with no ``-``, since valid Python ``.egg`` files always have a version
+   number (but Scheme eggs often don't).
+
+ * Support ``file://`` links to directories in ``--find-links``, so that
+   easy_install can build packages from local source checkouts.
+
+ * Added automatic retry for Sourceforge mirrors.  The new download process is
+   to first just try dl.sourceforge.net, then randomly select mirror IPs and
+   remove ones that fail, until something works.  The removed IPs stay removed
+   for the remainder of the run.
+
+ * Ignore bdist_dumb distributions when looking at download URLs.
+
+0.6a11
+ * Process ``dependency_links.txt`` if found in a distribution, by adding the
+   URLs to the list for scanning.
+
+ * Use relative paths in ``.pth`` files when eggs are being installed to the
+   same directory as the ``.pth`` file.  This maximizes portability of the
+   target directory when building applications that contain eggs.
+
+ * Added ``easy_install-N.N`` script(s) for convenience when using multiple
+   Python versions.
+
+ * Added automatic handling of installation conflicts.  Eggs are now shifted to
+   the front of sys.path, in an order consistent with where they came from,
+   making EasyInstall seamlessly co-operate with system package managers.
+
+   The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options
+   are now no longer necessary, and will generate warnings at the end of a
+   run if you use them.
+
+ * Don't recursively traverse subdirectories given to ``--find-links``.
+
+0.6a10
+ * Added exhaustive testing of the install directory, including a spawn test
+   for ``.pth`` file support, and directory writability/existence checks.  This
+   should virtually eliminate the need to set or configure ``--site-dirs``.
+
+ * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of
+   RTFM-ing.  :)
+
+ * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it
+   manually to make it work.  ``--multi-version`` is no longer a silent
+   default; you must explicitly use it if installing to a non-PYTHONPATH,
+   non-"site" directory.
+
+ * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``,
+   ``--install-dir``, and ``--script-dir`` options, whether on the command line
+   or in configuration files.
+
+ * Improved SourceForge mirror processing to work faster and be less affected
+   by transient HTML changes made by SourceForge.
+
+ * PyPI searches now use the exact spelling of requirements specified on the
+   command line or in a project's ``install_requires``.  Previously, a
+   normalized form of the name was used, which could lead to unnecessary
+   full-index searches when a project's name had an underscore (``_``) in it.
+
+ * EasyInstall can now download bare ``.py`` files and wrap them in an egg,
+   as long as you include an ``#egg=name-version`` suffix on the URL, or if
+   the ``.py`` file is listed as the "Download URL" on the project's PyPI page.
+   This allows third parties to "package" trivial Python modules just by
+   linking to them (e.g. from within their own PyPI page or download links
+   page).
+
+ * The ``--always-copy`` option now skips "system" and "development" eggs since
+   they can't be reliably copied.  Note that this may cause EasyInstall to
+   choose an older version of a package than what you expected, or it may cause
+   downloading and installation of a fresh version of what's already installed.
+
+ * The ``--find-links`` option previously scanned all supplied URLs and
+   directories as early as possible, but now only directories and direct
+   archive links are scanned immediately.  URLs are not retrieved unless a
+   package search was already going to go online due to a package not being
+   available locally, or due to the use of the ``--update`` or ``-U`` option.
+
+ * Fixed the annoying ``--help-commands`` wart.
+
+0.6a9
+ * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside
+   "baskets") when they weren't explicitly listed in the ``.pth`` file.
+
+ * If more than one URL appears to describe the exact same distribution, prefer
+   the shortest one.  This helps to avoid "table of contents" CGI URLs like the
+   ones on effbot.org.
+
+ * Quote arguments to python.exe (including python's path) to avoid problems
+   when Python (or a script) is installed in a directory whose name contains
+   spaces on Windows.
+
+ * Support full roundtrip translation of eggs to and from ``bdist_wininst``
+   format.  Running ``bdist_wininst`` on a setuptools-based package wraps the
+   egg in an .exe that will safely install it as an egg (i.e., with metadata
+   and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
+   back into an ``.egg`` file or directory and install it as such.
+
+0.6a8
+ * Update for changed SourceForge mirror format
+
+ * Fixed not installing dependencies for some packages fetched via Subversion
+
+ * Fixed dependency installation with ``--always-copy`` not using the same
+   dependency resolution procedure as other operations.
+
+ * Fixed not fully removing temporary directories on Windows, if a Subversion
+   checkout left read-only files behind
+
+ * Fixed some problems building extensions when Pyrex was installed, especially
+   with Python 2.4 and/or packages using SWIG.
+
+0.6a7
+ * Fixed not being able to install Windows script wrappers using Python 2.3
+
+0.6a6
+ * Added support for "traditional" PYTHONPATH-based non-root installation, and
+   also the convenient ``virtual-python.py`` script, based on a contribution
+   by Ian Bicking.  The setuptools egg now contains a hacked ``site`` module
+   that makes the PYTHONPATH-based approach work with .pth files, so that you
+   can get the full EasyInstall feature set on such installations.
+
+ * Added ``--no-deps`` and ``--allow-hosts`` options.
+
+ * Improved Windows ``.exe`` script wrappers so that the script can have the
+   same name as a module without confusing Python.
+
+ * Changed dependency processing so that it's breadth-first, allowing a
+   depender's preferences to override those of a dependee, to prevent conflicts
+   when a lower version is acceptable to the dependee, but not the depender.
+   Also, ensure that currently installed/selected packages aren't given
+   precedence over ones desired by a package being installed, which could
+   cause conflict errors.
+
+0.6a3
+ * Improved error message when trying to use old ways of running
+   ``easy_install``.  Removed the ability to run via ``python -m`` or by
+   running ``easy_install.py``; ``easy_install`` is the command to run on all
+   supported platforms.
+
+ * Improved wrapper script generation and runtime initialization so that a
+   VersionConflict doesn't occur if you later install a competing version of a
+   needed package as the default version of that package.
+
+ * Fixed a problem parsing version numbers in ``#egg=`` links.
+
+0.6a2
+ * EasyInstall can now install "console_scripts" defined by packages that use
+   ``setuptools`` and define appropriate entry points.  On Windows, console
+   scripts get an ``.exe`` wrapper so you can just type their name.  On other
+   platforms, the scripts are installed without a file extension.
+
+ * Using ``python -m easy_install`` or running ``easy_install.py`` is now
+   DEPRECATED, since an ``easy_install`` wrapper is now available on all
+   platforms.
+
+0.6a1
+ * EasyInstall now does MD5 validation of downloads from PyPI, or from any link
+   that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest.
+
+ * EasyInstall now handles symlinks in target directories by removing the link,
+   rather than attempting to overwrite the link's destination.  This makes it
+   easier to set up an alternate Python "home" directory (as described above in
+   the `Non-Root Installation`_ section).
+
+ * Added support for handling MacOS platform information in ``.egg`` filenames,
+   based on a contribution by Kevin Dangoor.  You may wish to delete and
+   reinstall any eggs whose filename includes "darwin" and "Power_Macintosh",
+   because the format for this platform information has changed so that minor
+   OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a
+   previous OS version to become obsolete.
+
+ * easy_install's dependency processing algorithms have changed.  When using
+   ``--always-copy``, it now ensures that dependencies are copied too.  When
+   not using ``--always-copy``, it tries to use a single resolution loop,
+   rather than recursing.
+
+ * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py``
+   extensions.
+
+ * Added ``--site-dirs`` option to allow adding custom "site" directories.
+   Made ``easy-install.pth`` work in platform-specific alternate site
+   directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X).
+
+ * If you manually delete the current version of a package, the next run of
+   EasyInstall against the target directory will now remove the stray entry
+   from the ``easy-install.pth`` file.
+
+ * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID
+   as pointing to the named project's source checkout.  Such URLs have a lower
+   match precedence than any other kind of distribution, so they'll only be
+   used if they have a higher version number than any other available
+   distribution, or if you use the ``--editable`` option.  The ``#egg``
+   fragment can contain a version if it's formatted as ``#egg=proj-ver``,
+   where ``proj`` is the project name, and ``ver`` is the version number.  You
+   *must* use the format for these values that the ``bdist_egg`` command uses;
+   i.e., all non-alphanumeric runs must be condensed to single underscore
+   characters.
+
+ * Added the ``--editable`` option; see `Editing and Viewing Source Packages`_
+   above for more info.  Also, slightly changed the behavior of the
+   ``--build-directory`` option.
+
+ * Fixed the setup script sandbox facility not recognizing certain paths as
+   valid on case-insensitive platforms.
+
+0.5a12
+ * Fix ``python -m easy_install`` not working due to setuptools being installed
+   as a zipfile.  Update safety scanner to check for modules that might be used
+   as ``python -m`` scripts.
+
+ * Misc. fixes for win32.exe support, including changes to support Python 2.4's
+   changed ``bdist_wininst`` format.
+
+0.5a10
+ * Put the ``easy_install`` module back in as a module, as it's needed for
+   ``python -m`` to run it!
+
+ * Allow ``--find-links/-f`` to accept local directories or filenames as well
+   as URLs.
+
+0.5a9
+ * EasyInstall now automatically detects when an "unmanaged" package or
+   module is going to be on ``sys.path`` ahead of a package you're installing,
+   thereby preventing the newer version from being imported.  By default, it
+   will abort installation to alert you of the problem, but there are also
+   new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``)
+   available to change the default behavior.  (Note: this new feature doesn't
+   take effect for egg files that were built with older ``setuptools``
+   versions, because they lack the new metadata file required to implement it.)
+
+ * The ``easy_install`` distutils command now uses ``DistutilsError`` as its
+   base error type for errors that should just issue a message to stderr and
+   exit the program without a traceback.
+
+ * EasyInstall can now be given a path to a directory containing a setup
+   script, and it will attempt to build and install the package there.
+
+ * EasyInstall now performs a safety analysis on module contents to determine
+   whether a package is likely to run in zipped form, and displays
+   information about what modules may be doing introspection that would break
+   when running as a zipfile.
+
+ * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that
+   would ordinarily be considered safe to unzip, and changed the meaning of
+   ``--zip-ok/-z`` to "always leave everything zipped".
+
+0.5a8
+ * There is now a separate documentation page for `setuptools`_; revision
+   history that's not specific to EasyInstall has been moved to that page.
+
+ .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
+
+0.5a5
+ * Made ``easy_install`` a standard ``setuptools`` command, moving it from
+   the ``easy_install`` module to ``setuptools.command.easy_install``.  Note
+   that if you were importing or extending it, you must now change your imports
+   accordingly.  ``easy_install.py`` is still installed as a script, but not as
+   a module.
+
+0.5a4
+ * Added ``--always-copy/-a`` option to always copy needed packages to the
+   installation directory, even if they're already present elsewhere on
+   sys.path. (In previous versions, this was the default behavior, but now
+   you must request it.)
+
+ * Added ``--upgrade/-U`` option to force checking PyPI for latest available
+   version(s) of all packages requested by name and version, even if a matching
+   version is available locally.
+
+ * Added automatic installation of dependencies declared by a distribution
+   being installed.  These dependencies must be listed in the distribution's
+   ``EGG-INFO`` directory, so the distribution has to have declared its
+   dependencies by using setuptools.  If a package has requirements it didn't
+   declare, you'll still have to deal with them yourself.  (E.g., by asking
+   EasyInstall to find and install them.)
+
+ * Added the ``--record`` option to ``easy_install`` for the benefit of tools
+   that run ``setup.py install --record=filename`` on behalf of another
+   packaging system.
+
+0.5a3
+ * Fixed not setting script permissions to allow execution.
+
+ * Improved sandboxing so that setup scripts that want a temporary directory
+   (e.g. pychecker) can still run in the sandbox.
+
+0.5a2
+ * Fix stupid stupid refactoring-at-the-last-minute typos.  :(
+
+0.5a1
+ * Added support for converting ``.win32.exe`` installers to eggs on the fly.
+   EasyInstall will now recognize such files by name and install them.
+
+ * Fixed a problem with picking the "best" version to install (versions were
+   being sorted as strings, rather than as parsed values)
+
+0.4a4
+ * Added support for the distutils "verbose/quiet" and "dry-run" options, as
+   well as the "optimize" flag.
+
+ * Support downloading packages that were uploaded to PyPI (by scanning all
+   links on package pages, not just the homepage/download links).
+
+0.4a3
+ * Add progress messages to the search/download process so that you can tell
+   what URLs it's reading to find download links.  (Hopefully, this will help
+   people report out-of-date and broken links to package authors, and to tell
+   when they've asked for a package that doesn't exist.)
+
+0.4a2
+ * Added support for installing scripts
+
+ * Added support for setting options via distutils configuration files, and
+   using distutils' default options as a basis for EasyInstall's defaults.
+
+ * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the
+   script installation directory option.
+
+ * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if
+   Python includes SSL support.
+
+0.4a1
+ * Added ``--scan-url`` and ``--index-url`` options, to scan download pages
+   and search PyPI for needed packages.
+
+0.3a4
+ * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single
+   URL installs, to avoid running the wrong setup.py.
+
+0.3a3
+ * Added ``--build-directory=DIR/-b DIR`` option.
+
+ * Added "installation report" that explains how to use 'require()' when doing
+   a multiversion install or alternate installation directory.
+
+ * Added SourceForge mirror auto-select (Contributed by Ian Bicking)
+
+ * Added "sandboxing" that stops a setup script from running if it attempts to
+   write to the filesystem outside of the build area
+
+ * Added more workarounds for packages with quirky ``install_data`` hacks
+
+0.3a2
+ * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as
+   automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking)
+
+ * Misc. bug fixes
+
+0.3a1
+ * Initial release.
+
+
+Future Plans
+============
+
+* Additional utilities to list/remove/verify packages
+* Signature checking?  SSL?  Ability to suppress PyPI search?
+* Display byte progress meter when downloading distributions and long pages?
+* Redirect stdout/stderr to log during run_setup?
+
diff --git a/vendor/distribute-0.6.35/docs/index.txt b/vendor/distribute-0.6.35/docs/index.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5f3b945b200a8a0504d65a1aaf28892d0243a037
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/index.txt
@@ -0,0 +1,36 @@
+Welcome to Distribute's documentation!
+======================================
+
+`Distribute` is a fork of the `Setuptools` project.
+
+Distribute is intended to replace Setuptools as the standard method for
+working with Python module distributions.
+
+For those who may wonder why they should switch to Distribute over Setuptools, it’s quite simple:
+
+- Distribute is a drop-in replacement for Setuptools
+- The code is actively maintained, and has over 10 committers
+- Distribute offers Python 3 support!
+
+Documentation content:
+
+.. toctree::
+   :maxdepth: 2
+
+   roadmap
+   python3
+   using
+   setuptools
+   easy_install
+   pkg_resources
+
+
+.. image:: http://python-distribute.org/pip_distribute.png
+
+Design done by Idan Gazit (http://pixane.com) - License: cc-by-3.0
+
+Copy & paste::
+
+ curl -O http://python-distribute.org/distribute_setup.py
+ python distribute_setup.py
+ easy_install pip
\ No newline at end of file
diff --git a/vendor/distribute-0.6.35/docs/pkg_resources.txt b/vendor/distribute-0.6.35/docs/pkg_resources.txt
new file mode 100644
index 0000000000000000000000000000000000000000..480f9547ceb1fdf60c55e2e12a393a49ca84207f
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/pkg_resources.txt
@@ -0,0 +1,1955 @@
+=============================================================
+Package Discovery and Resource Access using ``pkg_resources``
+=============================================================
+
+The ``pkg_resources`` module distributed with ``setuptools`` provides an API
+for Python libraries to access their resource files, and for extensible
+applications and frameworks to automatically discover plugins.  It also
+provides runtime support for using C extensions that are inside zipfile-format
+eggs, support for merging packages that have separately-distributed modules or
+subpackages, and APIs for managing Python's current "working set" of active
+packages.
+
+
+.. contents:: **Table of Contents**
+
+
+--------
+Overview
+--------
+
+Eggs are a distribution format for Python modules, similar in concept to Java's
+"jars" or Ruby's "gems".  They differ from previous Python distribution formats
+in that they are importable (i.e. they can be added to ``sys.path``), and they
+are *discoverable*, meaning that they carry metadata that unambiguously
+identifies their contents and dependencies, and thus can be *automatically*
+found and added to ``sys.path`` in response to simple requests of the form,
+"get me everything I need to use docutils' PDF support".
+
+The ``pkg_resources`` module provides runtime facilities for finding,
+introspecting, activating and using eggs and other "pluggable" distribution
+formats.  Because these are new concepts in Python (and not that
+well-established in other languages either), it helps to have a few special
+terms for talking about eggs and how they can be used:
+
+project
+    A library, framework, script, plugin, application, or collection of data
+    or other resources, or some combination thereof.  Projects are assumed to
+    have "relatively unique" names, e.g. names registered with PyPI.
+
+release
+    A snapshot of a project at a particular point in time, denoted by a version
+    identifier.
+
+distribution
+    A file or files that represent a particular release.
+
+importable distribution
+    A file or directory that, if placed on ``sys.path``, allows Python to
+    import any modules contained within it.
+
+pluggable distribution
+    An importable distribution whose filename unambiguously identifies its
+    release (i.e. project and version), and whose contents unambiguously
+    specify what releases of other projects will satisfy its runtime
+    requirements.
+
+extra
+    An "extra" is an optional feature of a release, that may impose additional
+    runtime requirements.  For example, if docutils PDF support required a
+    PDF support library to be present, docutils could define its PDF support as
+    an "extra", and list what other project releases need to be available in
+    order to provide it.
+
+environment
+    A collection of distributions potentially available for importing, but not
+    necessarily active.  More than one distribution (i.e. release version) for
+    a given project may be present in an environment.
+
+working set
+    A collection of distributions actually available for importing, as on
+    ``sys.path``.  At most one distribution (release version) of a given
+    project may be present in a working set, as otherwise there would be
+    ambiguity as to what to import.
+
+eggs
+    Eggs are pluggable distributions in one of the three formats currently
+    supported by ``pkg_resources``.  There are built eggs, development eggs,
+    and egg links.  Built eggs are directories or zipfiles whose name ends
+    with ``.egg`` and follows the egg naming conventions, and contain an
+    ``EGG-INFO`` subdirectory (zipped or otherwise).  Development eggs are
+    normal directories of Python code with one or more ``ProjectName.egg-info``
+    subdirectories.  And egg links are ``*.egg-link`` files that contain the
+    name of a built or development egg, to support symbolic linking on
+    platforms that do not have native symbolic links.
+
+(For more information about these terms and concepts, see also this
+`architectural overview`_ of ``pkg_resources`` and Python Eggs in general.)
+
+.. _architectural overview: http://mail.python.org/pipermail/distutils-sig/2005-June/004652.html
+
+
+.. -----------------
+.. Developer's Guide
+.. -----------------
+
+.. This section isn't written yet.  Currently planned topics include
+    Accessing Resources
+    Finding and Activating Package Distributions
+        get_provider()
+        require()
+        WorkingSet
+        iter_distributions
+    Running Scripts
+    Configuration
+    Namespace Packages
+    Extensible Applications and Frameworks
+        Locating entry points
+        Activation listeners
+        Metadata access
+        Extended Discovery and Installation
+    Supporting Custom PEP 302 Implementations
+.. For now, please check out the extensive `API Reference`_ below.
+
+
+-------------
+API Reference
+-------------
+
+Namespace Package Support
+=========================
+
+A namespace package is a package that only contains other packages and modules,
+with no direct contents of its own.  Such packages can be split across
+multiple, separately-packaged distributions.  Normally, you do not need to use
+the namespace package APIs directly; instead you should supply the
+``namespace_packages`` argument to ``setup()`` in your project's ``setup.py``.
+See the `setuptools documentation on namespace packages`_ for more information.
+
+However, if for some reason you need to manipulate namespace packages or
+directly alter ``sys.path`` at runtime, you may find these APIs useful:
+
+``declare_namespace(name)``
+    Declare that the dotted package name `name` is a "namespace package" whose
+    contained packages and modules may be spread across multiple distributions.
+    The named package's ``__path__`` will be extended to include the
+    corresponding package in all distributions on ``sys.path`` that contain a
+    package of that name.  (More precisely, if an importer's
+    ``find_module(name)`` returns a loader, then it will also be searched for
+    the package's contents.)  Whenever a Distribution's ``activate()`` method
+    is invoked, it checks for the presence of namespace packages and updates
+    their ``__path__`` contents accordingly.
+
+Applications that manipulate namespace packages or directly alter ``sys.path``
+at runtime may also need to use this API function:
+
+``fixup_namespace_packages(path_item)``
+    Declare that `path_item` is a newly added item on ``sys.path`` that may
+    need to be used to update existing namespace packages.  Ordinarily, this is
+    called for you when an egg is automatically added to ``sys.path``, but if
+    your application modifies ``sys.path`` to include locations that may
+    contain portions of a namespace package, you will need to call this
+    function to ensure they are added to the existing namespace packages.
+
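+For illustration, here is a minimal sketch of using these two functions
+directly, for a hypothetical namespace package named ``myns`` whose portions
+are spread across several distributions (normally the ``namespace_packages``
+argument to ``setup()`` takes care of the declaration for you)::
+
+    # myns/__init__.py, in each distribution contributing to the namespace
+    import pkg_resources
+    pkg_resources.declare_namespace(__name__)
+
+    # later, in application code that adds a plugin directory at runtime
+    import sys
+    plugin_dir = "/opt/myapp/plugins"   # hypothetical path entry
+    sys.path.append(plugin_dir)
+    pkg_resources.fixup_namespace_packages(plugin_dir)
+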
+Although by default ``pkg_resources`` only supports namespace packages for
+filesystem and zip importers, you can extend its support to other "importers"
+compatible with PEP 302 using the ``register_namespace_handler()`` function.
+See the section below on `Supporting Custom Importers`_ for details.
+
+.. _setuptools documentation on namespace packages: http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
+
+
+``WorkingSet`` Objects
+======================
+
+The ``WorkingSet`` class provides access to a collection of "active"
+distributions.  In general, there is only one meaningful ``WorkingSet``
+instance: the one that represents the distributions that are currently active
+on ``sys.path``.  This global instance is available under the name
+``working_set`` in the ``pkg_resources`` module.  However, specialized
+tools may wish to manipulate working sets that don't correspond to
+``sys.path``, and therefore may wish to create other ``WorkingSet`` instances.
+
+It's important to note that the global ``working_set`` object is initialized
+from ``sys.path`` when ``pkg_resources`` is first imported, but is only updated
+if you do all future ``sys.path`` manipulation via ``pkg_resources`` APIs.  If
+you manually modify ``sys.path``, you must invoke the appropriate methods on
+the ``working_set`` instance to keep it in sync.  Unfortunately, Python does
+not provide any way to detect arbitrary changes to a list object like
+``sys.path``, so ``pkg_resources`` cannot automatically update the
+``working_set`` based on changes to ``sys.path``.
+
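+A minimal sketch of mirroring a manual ``sys.path`` change in the global
+working set (the directory name is hypothetical; ``add_entry()`` is described
+below)::
+
+    import sys
+    import pkg_resources
+
+    extra_dir = "/opt/myapp/eggs"
+    sys.path.append(extra_dir)
+    pkg_resources.working_set.add_entry(extra_dir)
+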
+``WorkingSet(entries=None)``
+    Create a ``WorkingSet`` from an iterable of path entries.  If `entries`
+    is not supplied, it defaults to the value of ``sys.path`` at the time
+    the constructor is called.
+
+    Note that you will not normally construct ``WorkingSet`` instances
+    yourself, but instead you will implicitly or explicitly use the global
+    ``working_set`` instance.  For the most part, the ``pkg_resources`` API
+    is designed so that the ``working_set`` is used by default, such that you
+    don't have to explicitly refer to it most of the time.
+
+
+Basic ``WorkingSet`` Methods
+----------------------------
+
+The following methods of ``WorkingSet`` objects are also available as
+module-level functions in ``pkg_resources`` that apply to the default
+``working_set`` instance.  Thus, you can use e.g. ``pkg_resources.require()``
+as an abbreviation for ``pkg_resources.working_set.require()``:
+
+
+``require(*requirements)``
+    Ensure that distributions matching `requirements` are activated
+
+    `requirements` must be a string or a (possibly-nested) sequence
+    thereof, specifying the distributions and versions required.  The
+    return value is a sequence of the distributions that needed to be
+    activated to fulfill the requirements; all relevant distributions are
+    included, even if they were already activated in this working set.
+
+    For the syntax of requirement specifiers, see the section below on
+    `Requirements Parsing`_.
+
+    In general, it should not be necessary for you to call this method
+    directly.  It's intended more for use in quick-and-dirty scripting and
+    interactive interpreter hacking than for production use. If you're creating
+    an actual library or application, it's strongly recommended that you create
+    a "setup.py" script using ``setuptools``, and declare all your requirements
+    there.  That way, tools like EasyInstall can automatically detect what
+    requirements your package has, and deal with them accordingly.
+
+    Note that calling ``require('SomePackage')`` will not install
+    ``SomePackage`` if it isn't already present.  If you need to do this, you
+    should use the ``resolve()`` method instead, which allows you to pass an
+    ``installer`` callback that will be invoked when a needed distribution
+    can't be found on the local machine.  You can then have this callback
+    display a dialog, automatically download the needed distribution, or
+    whatever else is appropriate for your application. See the documentation
+    below on the ``resolve()`` method for more information, and also on the
+    ``obtain()`` method of ``Environment`` objects.
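+
+    For instance, a minimal sketch using a hypothetical project name::
+
+        import pkg_resources
+
+        # activate an already-installed FooProject (and anything it requires);
+        # raises DistributionNotFound if no suitable distribution is present
+        pkg_resources.require("FooProject>=1.2")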
+
+``run_script(requires, script_name)``
+    Locate distribution specified by `requires` and run its `script_name`
+    script.  `requires` must be a string containing a requirement specifier.
+    (See `Requirements Parsing`_ below for the syntax.)
+
+    The script, if found, will be executed in *the caller's globals*.  That's
+    because this method is intended to be called from wrapper scripts that
+    act as a proxy for the "real" scripts in a distribution.  A wrapper script
+    usually doesn't need to do anything but invoke this function with the
+    correct arguments.
+
+    If you need more control over the script execution environment, you
+    probably want to use the ``run_script()`` method of a ``Distribution``
+    object's `Metadata API`_ instead.
+
+``iter_entry_points(group, name=None)``
+    Yield entry point objects from `group` matching `name`
+
+    If `name` is None, yields all entry points in `group` from all
+    distributions in the working set, otherwise only ones matching both
+    `group` and `name` are yielded.  Entry points are yielded from the active
+    distributions in the order that the distributions appear in the working
+    set.  (For the global ``working_set``, this should be the same as the order
+    that they are listed in ``sys.path``.)  Note that within the entry points
+    advertised by an individual distribution, there is no particular ordering.
+
+    Please see the section below on `Entry Points`_ for more information.
+
+
+``WorkingSet`` Methods and Attributes
+-------------------------------------
+
+These methods are used to query or manipulate the contents of a specific
+working set, so they must be explicitly invoked on a particular ``WorkingSet``
+instance:
+
+``add_entry(entry)``
+    Add a path item to the ``entries``, finding any distributions on it.  You
+    should use this when you add additional items to ``sys.path`` and you want
+    the global ``working_set`` to reflect the change.  This method is also
+    called by the ``WorkingSet()`` constructor during initialization.
+
+    This method uses ``find_distributions(entry, True)`` to find distributions
+    corresponding to the path entry, and then adds each of them with ``add()``.
+    Note, however, that `entry` is always appended to the ``entries``
+    attribute, even if it is already present.  (This is because ``sys.path``
+    can contain the same value more than once, and the ``entries`` attribute
+    should be able to reflect this.)
+
+``__contains__(dist)``
+    True if `dist` is active in this ``WorkingSet``.  Note that only one
+    distribution for a given project can be active in a given ``WorkingSet``.
+
+``__iter__()``
+    Yield distributions for non-duplicate projects in the working set.
+    The yield order is the order in which the items' path entries were
+    added to the working set.
+
+``find(req)``
+    Find a distribution matching `req` (a ``Requirement`` instance).
+    If there is an active distribution for the requested project, this
+    returns it, as long as it meets the version requirement specified by
+    `req`.  But, if there is an active distribution for the project and it
+    does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+    If there is no active distribution for the requested project, ``None``
+    is returned.
+
+``resolve(requirements, env=None, installer=None)``
+    List all distributions needed to (recursively) meet `requirements`
+
+    `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+    if supplied, should be an ``Environment`` instance.  If
+    not supplied, an ``Environment`` is created from the working set's
+    ``entries``.  `installer`, if supplied, will be invoked with each
+    requirement that cannot be met by an already-installed distribution; it
+    should return a ``Distribution`` or ``None``.  (See the ``obtain()`` method
+    of `Environment Objects`_, below, for more information on the `installer`
+    argument.)
+
+``add(dist, entry=None)``
+    Add `dist` to working set, associated with `entry`
+
+    If `entry` is unspecified, it defaults to ``dist.location``.  On exit from
+    this routine, `entry` is added to the end of the working set's ``.entries``
+    (if it wasn't already present).
+
+    `dist` is only added to the working set if it's for a project that
+    doesn't already have a distribution active in the set.  If it's
+    successfully added, any callbacks registered with the ``subscribe()``
+    method will be called.  (See `Receiving Change Notifications`_, below.)
+
+    Note: ``add()`` is automatically called for you by the ``require()``
+    method, so you don't normally need to use this method directly.
+
+``entries``
+    This attribute represents a "shadow" ``sys.path``, primarily useful for
+    debugging.  If you are experiencing import problems, you should check
+    the global ``working_set`` object's ``entries`` against ``sys.path``, to
+    ensure that they match.  If they do not, then some part of your program
+    is manipulating ``sys.path`` without updating the ``working_set``
+    accordingly.  IMPORTANT NOTE: do not directly manipulate this attribute!
+    Setting it equal to ``sys.path`` will not fix your problem, any more than
+    putting black tape over an "engine warning" light will fix your car!  If
+    this attribute is out of sync with ``sys.path``, it's merely an *indicator*
+    of the problem, not the cause of it.
+
+
+Receiving Change Notifications
+------------------------------
+
+Extensible applications and frameworks may need to receive notification when
+a new distribution (such as a plug-in component) has been added to a working
+set.  This is what the ``subscribe()`` method and ``add_activation_listener()``
+function are for.
+
+``subscribe(callback)``
+    Invoke ``callback(distribution)`` once for each active distribution that is
+    in the set now, or gets added later.  Because the callback is invoked for
+    already-active distributions, you do not need to loop over the working set
+    yourself to deal with the existing items; just register the callback and
+    be prepared for the fact that it will be called immediately by this method.
+
+    Note that callbacks *must not* allow exceptions to propagate, or they will
+    interfere with the operation of other callbacks and possibly result in an
+    inconsistent working set state.  Callbacks should use a try/except block
+    to ignore, log, or otherwise process any errors, especially since the code
+    that caused the callback to be invoked is unlikely to be able to handle
+    the errors any better than the callback itself.
+
+``pkg_resources.add_activation_listener()`` is an alternate spelling of
+``pkg_resources.working_set.subscribe()``.
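+
+A minimal sketch of a defensive activation callback (the logging behaviour is
+only an example)::
+
+    import logging
+    import pkg_resources
+
+    def on_activate(dist):
+        # callbacks must not let exceptions propagate, so guard everything
+        try:
+            logging.getLogger("myapp.plugins").info("activated %s", dist)
+        except Exception:
+            pass
+
+    # invoked immediately for already-active distributions, then for new ones
+    pkg_resources.add_activation_listener(on_activate)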
+
+
+Locating Plugins
+----------------
+
+Extensible applications will sometimes have a "plugin directory" or a set of
+plugin directories, from which they want to load entry points or other
+metadata.  The ``find_plugins()`` method allows you to do this, by scanning an
+environment for the newest version of each project that can be safely loaded
+without conflicts or missing requirements.
+
+``find_plugins(plugin_env, full_env=None, fallback=True)``
+   Scan `plugin_env` and identify which distributions could be added to this
+   working set without version conflicts or missing requirements.
+
+   Example usage::
+
+       distributions, errors = working_set.find_plugins(
+           Environment(plugin_dirlist)
+       )
+       map(working_set.add, distributions)  # add plugins+libs to sys.path
+       print "Couldn't load", errors        # display errors
+
+   The `plugin_env` should be an ``Environment`` instance that contains only
+   distributions that are in the project's "plugin directory" or directories.
+   The `full_env`, if supplied, should be an ``Environment`` instance that
+   contains all currently-available distributions.
+
+   If `full_env` is not supplied, one is created automatically from the
+   ``WorkingSet`` this method is called on, which will typically mean that
+   every directory on ``sys.path`` will be scanned for distributions.
+
+   This method returns a 2-tuple: (`distributions`, `error_info`), where
+   `distributions` is a list of the distributions found in `plugin_env` that
+   were loadable, along with any other distributions that are needed to resolve
+   their dependencies.  `error_info` is a dictionary mapping unloadable plugin
+   distributions to an exception instance describing the error that occurred.
+   Usually this will be a ``DistributionNotFound`` or ``VersionConflict``
+   instance.
+
+   Most applications will use this method mainly on the master ``working_set``
+   instance in ``pkg_resources``, and then immediately add the returned
+   distributions to the working set so that they are available on sys.path.
+   This will make it possible to find any entry points, and allow any other
+   metadata tracking and hooks to be activated.
+
+   The resolution algorithm used by ``find_plugins()`` is as follows.  First,
+   the project names of the distributions present in `plugin_env` are sorted.
+   Then, each project's eggs are tried in descending version order (i.e.,
+   newest version first).
+
+   An attempt is made to resolve each egg's dependencies. If the attempt is
+   successful, the egg and its dependencies are added to the output list and to
+   a temporary copy of the working set.  The resolution process continues with
+   the next project name, and no older eggs for that project are tried.
+
+   If the resolution attempt fails, however, the error is added to the error
+   dictionary.  If the `fallback` flag is true, the next older version of the
+   plugin is tried, until a working version is found.  If false, the resolution
+   process continues with the next plugin project name.
+
+   Some applications may have stricter fallback requirements than others. For
+   example, an application that has a database schema or persistent objects
+   may not be able to safely downgrade a version of a package. Others may want
+   to ensure that a new plugin configuration is either 100% good or else
+   revert to a known-good configuration.  (That is, they may wish to revert to
+   a known configuration if the `error_info` return value is non-empty.)
+
+   Note that this algorithm gives precedence to satisfying the dependencies of
+   alphabetically prior project names in case of version conflicts. If two
+   projects named "AaronsPlugin" and "ZekesPlugin" both need different versions
+   of "TomsLibrary", then "AaronsPlugin" will win and "ZekesPlugin" will be
+   disabled due to version conflict.
+
+
+``Environment`` Objects
+=======================
+
+An "environment" is a collection of ``Distribution`` objects, usually ones
+that are present and potentially importable on the current platform.
+``Environment`` objects are used by ``pkg_resources`` to index available
+distributions during dependency resolution.
+
+``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)``
+    Create an environment snapshot by scanning `search_path` for distributions
+    compatible with `platform` and `python`.  `search_path` should be a
+    sequence of strings such as might be used on ``sys.path``.  If a
+    `search_path` isn't supplied, ``sys.path`` is used.
+
+    `platform` is an optional string specifying the name of the platform
+    that platform-specific distributions must be compatible with.  If
+    unspecified, it defaults to the current platform.  `python` is an
+    optional string naming the desired version of Python (e.g. ``'2.4'``);
+    it defaults to the currently-running version.
+
+    You may explicitly set `platform` (and/or `python`) to ``None`` if you
+    wish to include *all* distributions, not just those compatible with the
+    running platform or Python version.
+
+    Note that `search_path` is scanned immediately for distributions, and the
+    resulting ``Environment`` is a snapshot of the found distributions.  It
+    is not automatically updated if the system's state changes due to e.g.
+    installation or removal of distributions.
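+
+    A small sketch, scanning a hypothetical plugin directory::
+
+        from pkg_resources import Environment
+
+        env = Environment(["/opt/myapp/plugins"])
+        for project_name in env:         # lower-cased project names
+            print(env[project_name][0])  # newest distribution of each project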
+
+``__getitem__(project_name)``
+    Returns a list of distributions for the given project name, ordered
+    from newest to oldest version.  (And highest to lowest format precedence
+    for distributions that contain the same version of the project.)  If there
+    are no distributions for the project, returns an empty list.
+
+``__iter__()``
+    Yield the unique project names of the distributions in this environment.
+    The yielded names are always in lower case.
+
+``add(dist)``
+    Add `dist` to the environment if it matches the platform and python version
+    specified at creation time, and only if the distribution hasn't already
+    been added. (i.e., adding the same distribution more than once is a no-op.)
+
+``remove(dist)``
+    Remove `dist` from the environment.
+
+``can_add(dist)``
+    Is distribution `dist` acceptable for this environment?  If it's not
+    compatible with the ``platform`` and ``python`` version values specified
+    when the environment was created, a false value is returned.
+
+``__add__(dist_or_env)``  (``+`` operator)
+    Add a distribution or environment to an ``Environment`` instance, returning
+    a *new* environment object that contains all the distributions previously
+    contained by both.  The new environment will have a ``platform`` and
+    ``python`` of ``None``, meaning that it will not reject any distributions
+    from being added to it; it will simply accept whatever is added.  If you
+    want the added items to be filtered for platform and Python version, or
+    you want to add them to the *same* environment instance, you should use
+    in-place addition (``+=``) instead.
+
+``__iadd__(dist_or_env)``  (``+=`` operator)
+    Add a distribution or environment to an ``Environment`` instance
+    *in-place*, updating the existing instance and returning it.  The
+    ``platform`` and ``python`` filter attributes take effect, so distributions
+    in the source that do not have a suitable platform string or Python version
+    are silently ignored.
+
+``best_match(req, working_set, installer=None)``
+    Find distribution best matching `req` and usable on `working_set`
+
+    This calls the ``find(req)`` method of the `working_set` to see if a
+    suitable distribution is already active.  (This may raise
+    ``VersionConflict`` if an unsuitable version of the project is already
+    active in the specified `working_set`.)  If a suitable distribution isn't
+    active, this method returns the newest distribution in the environment
+    that meets the ``Requirement`` in `req`.  If no suitable distribution is
+    found, and `installer` is supplied, then the result of calling
+    the environment's ``obtain(req, installer)`` method will be returned.
+
+``obtain(requirement, installer=None)``
+    Obtain a distro that matches requirement (e.g. via download).  In the
+    base ``Environment`` class, this routine just returns
+    ``installer(requirement)``, unless `installer` is None, in which case
+    None is returned instead.  This method is a hook that allows subclasses
+    to attempt other ways of obtaining a distribution before falling back
+    to the `installer` argument.
+
+``scan(search_path=None)``
+    Scan `search_path` for distributions usable on `platform`
+
+    Any distributions found are added to the environment.  `search_path` should
+    be a sequence of strings such as might be used on ``sys.path``.  If not
+    supplied, ``sys.path`` is used.  Only distributions conforming to
+    the platform/python version defined at initialization are added.  This
+    method is a shortcut for using the ``find_distributions()`` function to
+    find the distributions from each item in `search_path`, and then calling
+    ``add()`` to add each one to the environment.
+
+
+``Requirement`` Objects
+=======================
+
+``Requirement`` objects express what versions of a project are suitable for
+some purpose.  These objects (or their string form) are used by various
+``pkg_resources`` APIs in order to find distributions that a script or
+distribution needs.
+
+
+Requirements Parsing
+--------------------
+
+``parse_requirements(s)``
+    Yield ``Requirement`` objects for a string or iterable of lines.  Each
+    requirement must start on a new line.  See below for syntax.
+
+``Requirement.parse(s)``
+    Create a ``Requirement`` object from a string or iterable of lines.  A
+    ``ValueError`` is raised if the string or lines do not contain a valid
+    requirement specifier, or if they contain more than one specifier.  (To
+    parse multiple specifiers from a string or iterable of strings, use
+    ``parse_requirements()`` instead.)
+
+    The syntax of a requirement specifier can be defined in EBNF as follows::
+
+        requirement  ::= project_name versionspec? extras?
+        versionspec  ::= comparison version (',' comparison version)*
+        comparison   ::= '<' | '<=' | '!=' | '==' | '>=' | '>'
+        extras       ::= '[' extralist? ']'
+        extralist    ::= identifier (',' identifier)*
+        project_name ::= identifier
+        identifier   ::= [-A-Za-z0-9_]+
+        version      ::= [-A-Za-z0-9_.]+
+
+    Tokens can be separated by whitespace, and a requirement can be continued
+    over multiple lines using a backslash (``\\``).  Line-end comments (using
+    ``#``) are also allowed.
+
+    Some examples of valid requirement specifiers::
+
+        FooProject >= 1.2
+        Fizzy [foo, bar]
+        PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1
+        SomethingWhoseVersionIDontCareAbout
+
+    The project name is the only required portion of a requirement string, and
+    if it's the only thing supplied, the requirement will accept any version
+    of that project.
+
+    The "extras" in a requirement are used to request optional features of a
+    project, that may require additional project distributions in order to
+    function.  For example, if the hypothetical "Report-O-Rama" project offered
+    optional PDF support, it might require an additional library in order to
+    provide that support.  Thus, a project needing Report-O-Rama's PDF features
+    could use a requirement of ``Report-O-Rama[PDF]`` to request installation
+    or activation of both Report-O-Rama and any libraries it needs in order to
+    provide PDF support.  For example, you could use::
+
+        easy_install.py Report-O-Rama[PDF]
+
+    To install the necessary packages using the EasyInstall program, or call
+    ``pkg_resources.require('Report-O-Rama[PDF]')`` to add the necessary
+    distributions to sys.path at runtime.
+
+
+``Requirement`` Methods and Attributes
+--------------------------------------
+
+``__contains__(dist_or_version)``
+    Return true if `dist_or_version` fits the criteria for this requirement.
+    If `dist_or_version` is a ``Distribution`` object, its project name must
+    match the requirement's project name, and its version must meet the
+    requirement's version criteria.  If `dist_or_version` is a string, it is
+    parsed using the ``parse_version()`` utility function.  Otherwise, it is
+    assumed to be an already-parsed version.
+
+    The ``Requirement`` object's version specifiers (``.specs``) are internally
+    sorted into ascending version order, and used to establish what ranges of
+    versions are acceptable.  Adjacent redundant conditions are effectively
+    consolidated (e.g. ``">1, >2"`` produces the same results as ``">1"``, and
+    ``"<2,<3"`` produces the same results as``"<3"``). ``"!="`` versions are
+    excised from the ranges they fall within.  The version being tested for
+    acceptability is then checked for membership in the resulting ranges.
+    (Note that providing conflicting conditions for the same version (e.g.
+    ``"<2,>=2"`` or ``"==2,!=2"``) is meaningless and may therefore produce
+    bizarre results when compared with actual version number(s).)
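+
+    A short sketch, using a hypothetical project name::
+
+        from pkg_resources import Requirement
+
+        req = Requirement.parse("FooProject>=1.2,!=1.5")
+        print("1.4" in req)    # True: within the accepted range
+        print("1.5" in req)    # False: explicitly excluded
+        print("1.0" in req)    # False: below the minimum version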
+
+``__eq__(other_requirement)``
+    A requirement compares equal to another requirement if they have
+    case-insensitively equal project names, version specifiers, and "extras".
+    (The order that extras and version specifiers are in is also ignored.)
+    Equal requirements also have equal hashes, so that requirements can be
+    used in sets or as dictionary keys.
+
+``__str__()``
+    The string form of a ``Requirement`` is a string that, if passed to
+    ``Requirement.parse()``, would return an equal ``Requirement`` object.
+
+``project_name``
+    The name of the required project
+
+``key``
+    An all-lowercase version of the ``project_name``, useful for comparison
+    or indexing.
+
+``extras``
+    A tuple of names of "extras" that this requirement calls for.  (These will
+    be all-lowercase and normalized using the ``safe_extra()`` parsing utility
+    function, so they may not exactly equal the extras the requirement was
+    created with.)
+
+``specs``
+    A list of ``(op,version)`` tuples, sorted in ascending parsed-version
+    order.  The `op` in each tuple is a comparison operator, represented as
+    a string.  The `version` is the (unparsed) version number.  The relative
+    order of tuples containing the same version numbers is undefined, since
+    having more than one operator for a given version is either redundant or
+    self-contradictory.
+
+
+Entry Points
+============
+
+Entry points are a simple way for distributions to "advertise" Python objects
+(such as functions or classes) for use by other distributions.  Extensible
+applications and frameworks can search for entry points with a particular name
+or group, either from a specific distribution or from all active distributions
+on sys.path, and then inspect or load the advertised objects at will.
+
+Entry points belong to "groups" which are named with a dotted name similar to
+a Python package or module name.  For example, the ``setuptools`` package uses
+an entry point named ``distutils.commands`` in order to find commands defined
+by distutils extensions.  ``setuptools`` treats the names of entry points
+defined in that group as the acceptable commands for a setup script.
+
+In a similar way, other packages can define their own entry point groups,
+either using dynamic names within the group (like ``distutils.commands``), or
+possibly using predefined names within the group.  For example, a blogging
+framework that offers various pre- or post-publishing hooks might define an
+entry point group and look for entry points named "pre_process" and
+"post_process" within that group.
+
+To advertise an entry point, a project needs to use ``setuptools`` and provide
+an ``entry_points`` argument to ``setup()`` in its setup script, so that the
+entry points will be included in the distribution's metadata.  For more
+details, see the ``setuptools`` documentation.  (XXX link here to setuptools)
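+
+For example, a minimal sketch of advertising an entry point from a
+hypothetical project's ``setup.py``::
+
+    from setuptools import setup
+
+    setup(
+        name="Report-O-Rama",
+        version="1.0",
+        packages=["report_o_rama"],
+        entry_points={
+            # group name -> list of "name = module:attrs" specifiers
+            "distutils.commands": [
+                "report = report_o_rama.command:ReportCommand",
+            ],
+        },
+    )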
+
+Each project distribution can advertise at most one entry point of a given
+name within the same entry point group.  For example, a distutils extension
+could advertise two different ``distutils.commands`` entry points, as long as
+they had different names.  However, there is nothing that prevents *different*
+projects from advertising entry points of the same name in the same group.  In
+some cases, this is a desirable thing, since the application or framework that
+uses the entry points may be calling them as hooks, or in some other way
+combining them.  It is up to the application or framework to decide what to do
+if multiple distributions advertise an entry point; some possibilities include
+using both entry points, displaying an error message, using the first one found
+in sys.path order, etc.
+
+
+Convenience API
+---------------
+
+In the following functions, the `dist` argument can be a ``Distribution``
+instance, a ``Requirement`` instance, or a string specifying a requirement
+(i.e. project name, version, etc.).  If the argument is a string or
+``Requirement``, the specified distribution is located (and added to sys.path
+if not already present).  An error will be raised if a matching distribution is
+not available.
+
+The `group` argument should be a string containing a dotted identifier,
+identifying an entry point group.  If you are defining an entry point group,
+you should include some portion of your package's name in the group name so as
+to avoid collision with other packages' entry point groups.
+
+``load_entry_point(dist, group, name)``
+    Load the named entry point from the specified distribution, or raise
+    ``ImportError``.
+
+``get_entry_info(dist, group, name)``
+    Return an ``EntryPoint`` object for the given `group` and `name` from
+    the specified distribution.  Returns ``None`` if the distribution has not
+    advertised a matching entry point.
+
+``get_entry_map(dist, group=None)``
+    Return the distribution's entry point map for `group`, or the full entry
+    map for the distribution.  This function always returns a dictionary,
+    even if the distribution advertises no entry points.  If `group` is given,
+    the dictionary maps entry point names to the corresponding ``EntryPoint``
+    object.  If `group` is None, the dictionary maps group names to
+    dictionaries that then map entry point names to the corresponding
+    ``EntryPoint`` instance in that group.
+
+``iter_entry_points(group, name=None)``
+    Yield entry point objects from `group` matching `name`.
+
+    If `name` is None, yields all entry points in `group` from all
+    distributions in the working set on sys.path, otherwise only ones matching
+    both `group` and `name` are yielded.  Entry points are yielded from
+    the active distributions in the order that the distributions appear on
+    sys.path.  (Within entry points for a particular distribution, however,
+    there is no particular ordering.)
+
+    (This API is actually a method of the global ``working_set`` object; see
+    the section above on `Basic WorkingSet Methods`_ for more information.)
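+
+    A typical plugin-loading sketch, assuming a hypothetical group name
+    ``myapp.plugins``::
+
+        import pkg_resources
+
+        for ep in pkg_resources.iter_entry_points("myapp.plugins"):
+            plugin = ep.load()  # import and return the advertised object
+            plugin()            # assumes each advertised object is callable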
+
+
+Creating and Parsing
+--------------------
+
+``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)``
+    Create an ``EntryPoint`` instance.  `name` is the entry point name.  The
+    `module_name` is the (dotted) name of the module containing the advertised
+    object.  `attrs` is an optional tuple of names to look up from the
+    module to obtain the advertised object.  For example, an `attrs` of
+    ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the
+    advertised object could be obtained by the following code::
+
+        import baz
+        advertised_object = baz.foo.bar
+
+    The `extras` are an optional tuple of "extra feature" names that the
+    distribution needs in order to provide this entry point.  When the
+    entry point is loaded, these extra features are looked up in the `dist`
+    argument to find out what other distributions may need to be activated
+    on sys.path; see the ``load()`` method for more details.  The `extras`
+    argument is only meaningful if `dist` is specified.  `dist` must be
+    a ``Distribution`` instance.
+
+``EntryPoint.parse(src, dist=None)`` (classmethod)
+    Parse a single entry point from string `src`
+
+    Entry point syntax follows the form::
+
+        name = some.module:some.attr [extra1,extra2]
+
+    The entry name and module name are required, but the ``:attrs`` and
+    ``[extras]`` parts are optional, as is the whitespace shown between
+    some of the items.  The `dist` argument is passed through to the
+    ``EntryPoint()`` constructor, along with the other values parsed from
+    `src`.
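+
+    For instance, a small sketch (the module path is hypothetical)::
+
+        from pkg_resources import EntryPoint
+
+        ep = EntryPoint.parse("report = report_o_rama.command:ReportCommand")
+        print(ep.name)         # report
+        print(ep.module_name)  # report_o_rama.command
+        print(ep.attrs)        # ('ReportCommand',)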
+
+``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod)
+    Parse `lines` (a string or sequence of lines) to create a dictionary
+    mapping entry point names to ``EntryPoint`` objects.  ``ValueError`` is
+    raised if entry point names are duplicated, if `group` is not a valid
+    entry point group name, or if there are any syntax errors.  (Note: the
+    `group` parameter is used only for validation and to create more
+    informative error messages.)  If `dist` is provided, it will be used to
+    set the ``dist`` attribute of the created ``EntryPoint`` objects.
+
+``EntryPoint.parse_map(data, dist=None)`` (classmethod)
+    Parse `data` into a dictionary mapping group names to dictionaries mapping
+    entry point names to ``EntryPoint`` objects.  If `data` is a dictionary,
+    then the keys are used as group names and the values are passed to
+    ``parse_group()`` as the `lines` argument.  If `data` is a string or
+    sequence of lines, it is first split into .ini-style sections (using
+    the ``split_sections()`` utility function) and the section names are used
+    as group names.  In either case, the `dist` argument is passed through to
+    ``parse_group()`` so that the entry points will be linked to the specified
+    distribution.
+
+
+``EntryPoint`` Objects
+----------------------
+
+For simple introspection, ``EntryPoint`` objects have attributes that
+correspond exactly to the constructor argument names: ``name``,
+``module_name``, ``attrs``, ``extras``, and ``dist`` are all available.  In
+addition, the following methods are provided:
+
+``load(require=True, env=None, installer=None)``
+    Load the entry point, returning the advertised Python object, or raise
+    ``ImportError`` if it cannot be obtained.  If `require` is a true value,
+    then ``require(env, installer)`` is called before attempting the import.
+
+``require(env=None, installer=None)``
+    Ensure that any "extras" needed by the entry point are available on
+    sys.path.  ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``,
+    but no ``dist``, or if the named extras are not defined by the
+    distribution.  If `env` is supplied, it must be an ``Environment``, and it
+    will be used to search for needed distributions if they are not already
+    present on sys.path.  If `installer` is supplied, it must be a callable
+    taking a ``Requirement`` instance and returning a matching importable
+    ``Distribution`` instance or None.
+
+``__str__()``
+    The string form of an ``EntryPoint`` is a string that could be passed to
+    ``EntryPoint.parse()`` to produce an equivalent ``EntryPoint``.
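+
+As a hypothetical illustration (the project and group names below are
+assumptions, not part of the API), an application might look up and load an
+advertised plugin like this::
+
+    import pkg_resources
+
+    dist = pkg_resources.get_distribution("SomePlugin")
+    ep = dist.get_entry_map().get("example.plugins", {}).get("default")
+    if ep is not None:
+        plugin = ep.load()   # resolves extras, imports, returns the object
+        # str(ep) round-trips through EntryPoint.parse():
+        clone = pkg_resources.EntryPoint.parse(str(ep), dist=ep.dist)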
+
+
+``Distribution`` Objects
+========================
+
+``Distribution`` objects represent collections of Python code that may or may
+not be importable, and may or may not have metadata and resources associated
+with them.  Their metadata may include information such as what other projects
+the distribution depends on, what entry points the distribution advertises, and
+so on.
+
+
+Getting or Creating Distributions
+---------------------------------
+
+Most commonly, you'll obtain ``Distribution`` objects from a ``WorkingSet`` or
+an ``Environment``.  (See the sections above on `WorkingSet Objects`_ and
+`Environment Objects`_, which are containers for active distributions and
+available distributions, respectively.)  You can also obtain ``Distribution``
+objects from one of these high-level APIs:
+
+``find_distributions(path_item, only=False)``
+    Yield distributions accessible via `path_item`.  If `only` is true, yield
+    only distributions whose ``location`` is equal to `path_item`.  In other
+    words, if `only` is true, this yields any distributions that would be
+    importable if `path_item` were on ``sys.path``.  If `only` is false, this
+    also yields distributions that are "in" or "under" `path_item`, but would
+    not be importable unless their locations were also added to ``sys.path``.
+
+``get_distribution(dist_spec)``
+    Return a ``Distribution`` object for a given ``Requirement`` or string.
+    If `dist_spec` is already a ``Distribution`` instance, it is returned.
+    If it is a ``Requirement`` object or a string that can be parsed into one,
+    it is used to locate and activate a matching distribution, which is then
+    returned.
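+
+For example, a minimal sketch (the directory path is just an illustration)::
+
+    import pkg_resources
+
+    # Scan a directory for distributions without activating any of them:
+    for dist in pkg_resources.find_distributions("/path/to/plugins"):
+        name, version = dist.project_name, dist.version
+
+    # Locate (activating it if necessary) an already-installed project:
+    dist = pkg_resources.get_distribution("distribute")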
+
+However, if you're creating specialized tools for working with distributions,
+or creating a new distribution format, you may also need to create
+``Distribution`` objects directly, using one of the three constructors below.
+
+These constructors all take an optional `metadata` argument, which is used to
+access any resources or metadata associated with the distribution.  `metadata`
+must be an object that implements the ``IResourceProvider`` interface, or None.
+If it is None, an ``EmptyProvider`` is used instead.  ``Distribution`` objects
+implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by
+delegating them to the `metadata` object.
+
+``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod)
+    Create a distribution for `location`, which must be a string such as a
+    URL, filename, or other string that might be used on ``sys.path``.
+    `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``.
+    If `basename` ends with ``.egg``, then the project's name, version, Python
+    version and platform are extracted from the filename and used to set those
+    properties of the created distribution.  Any additional keyword arguments
+    are forwarded to the ``Distribution()`` constructor.
+
+``Distribution.from_filename(filename, metadata=None, **kw)`` (classmethod)
+    Create a distribution by parsing a local filename.  This is a shorter way
+    of saying ``Distribution.from_location(normalize_path(filename),
+    os.path.basename(filename), metadata)``.  In other words, it creates a
+    distribution whose location is the normalized form of the filename, parsing
+    name and version information from the base portion of the filename.  Any
+    additional keyword arguments are forwarded to the ``Distribution()``
+    constructor.
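+
+    For example, using a made-up egg filename (the file need not exist for
+    the parsing to work)::
+
+        from pkg_resources import Distribution
+
+        dist = Distribution.from_filename("dist/Foo-1.2-py2.4-win32.egg")
+        # dist.project_name -> "Foo"
+        # dist.version      -> "1.2"
+        # dist.py_version   -> "2.4"
+        # dist.platform     -> "win32"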
+
+``Distribution(location, metadata, project_name, version, py_version, platform, precedence)``
+    Create a distribution by setting its properties.  All arguments are
+    optional and default to None, except for `py_version` (which defaults to
+    the current Python version) and `precedence` (which defaults to
+    ``EGG_DIST``; for more details see ``precedence`` under `Distribution
+    Attributes`_ below).  Note that it's usually easier to use the
+    ``from_filename()`` or ``from_location()`` constructors than to specify
+    all these arguments individually.
+
+
+``Distribution`` Attributes
+---------------------------
+
+location
+    A string indicating the distribution's location.  For an importable
+    distribution, this is the string that would be added to ``sys.path`` to
+    make it actively importable.  For non-importable distributions, this is
+    simply a filename, URL, or other way of locating the distribution.
+
+project_name
+    A string, naming the project that this distribution is for.  Project names
+    are defined by a project's setup script, and they are used to identify
+    projects on PyPI.  When a ``Distribution`` is constructed, the
+    `project_name` argument is passed through the ``safe_name()`` utility
+    function to filter out any unacceptable characters.
+
+key
+    ``dist.key`` is short for ``dist.project_name.lower()``.  It's used for
+    case-insensitive comparison and indexing of distributions by project name.
+
+extras
+    A list of strings, giving the names of extra features defined by the
+    project's dependency list (the ``extras_require`` argument specified in
+    the project's setup script).
+
+version
+    A string denoting what release of the project this distribution contains.
+    When a ``Distribution`` is constructed, the `version` argument is passed
+    through the ``safe_version()`` utility function to filter out any
+    unacceptable characters.  If no `version` is specified at construction
+    time, then attempting to access this attribute later will cause the
+    ``Distribution`` to try to discover its version by reading its ``PKG-INFO``
+    metadata file.  If ``PKG-INFO`` is unavailable or can't be parsed,
+    ``ValueError`` is raised.
+
+parsed_version
+    The ``parsed_version`` is a tuple representing a "parsed" form of the
+    distribution's ``version``.  ``dist.parsed_version`` is a shortcut for
+    calling ``parse_version(dist.version)``.  It is used to compare or sort
+    distributions by version.  (See the `Parsing Utilities`_ section below for
+    more information on the ``parse_version()`` function.)  Note that accessing
+    ``parsed_version`` may result in a ``ValueError`` if the ``Distribution``
+    was constructed without a `version` and without `metadata` capable of
+    supplying the missing version info.
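+
+    For example, a minimal sketch of why the parsed form matters::
+
+        from pkg_resources import Distribution
+
+        old = Distribution(project_name="Foo", version="1.2")
+        new = Distribution(project_name="Foo", version="1.10")
+        # Plain string comparison would order these incorrectly:
+        assert old.parsed_version < new.parsed_version
+        assert not (old.version < new.version)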
+
+py_version
+    The major/minor Python version the distribution supports, as a string.
+    For example, "2.3" or "2.4".  The default is the current version of Python.
+
+platform
+    A string representing the platform the distribution is intended for, or
+    ``None`` if the distribution is "pure Python" and therefore cross-platform.
+    See `Platform Utilities`_ below for more information on platform strings.
+
+precedence
+    A distribution's ``precedence`` is used to determine the relative order of
+    two distributions that have the same ``project_name`` and
+    ``parsed_version``.  The default precedence is ``pkg_resources.EGG_DIST``,
+    which is the highest (i.e. most preferred) precedence.  The full list
+    of predefined precedences, from most preferred to least preferred, is:
+    ``EGG_DIST``, ``BINARY_DIST``, ``SOURCE_DIST``, ``CHECKOUT_DIST``, and
+    ``DEVELOP_DIST``.  Normally, precedences other than ``EGG_DIST`` are used
+    only by the ``setuptools.package_index`` module, when sorting distributions
+    found in a package index to determine their suitability for installation.
+    "System" and "Development" eggs (i.e., ones that use the ``.egg-info``
+    format), however, are automatically given a precedence of ``DEVELOP_DIST``.
+
+
+
+``Distribution`` Methods
+------------------------
+
+``activate(path=None)``
+    Ensure distribution is importable on `path`.  If `path` is None,
+    ``sys.path`` is used instead.  This ensures that the distribution's
+    ``location`` is in the `path` list, and it also performs any necessary
+    namespace package fixups or declarations.  (That is, if the distribution
+    contains namespace packages, this method ensures that they are declared,
+    and that the distribution's contents for those namespace packages are
+    merged with the contents provided by any other active distributions.  See
+    the section above on `Namespace Package Support`_ for more information.)
+
+    ``pkg_resources`` adds a notification callback to the global ``working_set``
+    that ensures this method is called whenever a distribution is added to it.
+    Therefore, you should not normally need to explicitly call this method.
+    (Note that this means that namespace packages on ``sys.path`` are always
+    imported as soon as ``pkg_resources`` is, which is another reason why
+    namespace packages should not contain any code or import statements.)
+
+``as_requirement()``
+    Return a ``Requirement`` instance that matches this distribution's project
+    name and version.
+
+``requires(extras=())``
+    List the ``Requirement`` objects that specify this distribution's
+    dependencies.  If `extras` is specified, it should be a sequence of names
+    of "extras" defined by the distribution, and the list returned will then
+    include any dependencies needed to support the named "extras".
+
+``clone(**kw)``
+    Create a copy of the distribution.  Any supplied keyword arguments override
+    the corresponding argument to the ``Distribution()`` constructor, allowing
+    you to change some of the copied distribution's attributes.
+
+``egg_name()``
+    Return what this distribution's standard filename should be, not including
+    the ".egg" extension.  For example, a distribution for project "Foo"
+    version 1.2 that runs on Python 2.3 for Windows would have an ``egg_name()``
+    of ``Foo-1.2-py2.3-win32``.  Any dashes in the name or version are
+    converted to underscores.  (``Distribution.from_location()`` will convert
+    them back when parsing a ".egg" file name.)
+
+``__cmp__(other)``, ``__hash__()``
+    Distribution objects are hashed and compared on the basis of their parsed
+    version and precedence, followed by their key (lowercase project name),
+    location, Python version, and platform.
+
+The following methods are used to access ``EntryPoint`` objects advertised
+by the distribution.  See the section above on `Entry Points`_ for more
+detailed information about these operations:
+
+``get_entry_info(group, name)``
+    Return the ``EntryPoint`` object for `group` and `name`, or None if no
+    such point is advertised by this distribution.
+
+``get_entry_map(group=None)``
+    Return the entry point map for `group`.  If `group` is None, return
+    a dictionary mapping group names to entry point maps for all groups.
+    (An entry point map is a dictionary of entry point names to ``EntryPoint``
+    objects.)
+
+``load_entry_point(group, name)``
+    Short for ``get_entry_info(group, name).load()``.  Returns the object
+    advertised by the named entry point, or raises ``ImportError`` if
+    the entry point isn't advertised by this distribution, or there is some
+    other import problem.
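+
+For example, a console-script style lookup might be sketched like this (the
+project, group, and entry point names are hypothetical)::
+
+    import pkg_resources
+
+    dist = pkg_resources.get_distribution("SomeTool")
+    if dist.get_entry_info("console_scripts", "sometool") is not None:
+        main = dist.load_entry_point("console_scripts", "sometool")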
+
+In addition to the above methods, ``Distribution`` objects also implement all
+of the `IResourceProvider`_ and `IMetadataProvider Methods`_ (which are
+documented in later sections):
+
+* ``has_metadata(name)``
+* ``metadata_isdir(name)``
+* ``metadata_listdir(name)``
+* ``get_metadata(name)``
+* ``get_metadata_lines(name)``
+* ``run_script(script_name, namespace)``
+* ``get_resource_filename(manager, resource_name)``
+* ``get_resource_stream(manager, resource_name)``
+* ``get_resource_string(manager, resource_name)``
+* ``has_resource(resource_name)``
+* ``resource_isdir(resource_name)``
+* ``resource_listdir(resource_name)``
+
+If the distribution was created with a `metadata` argument, these resource and
+metadata access methods are all delegated to that `metadata` provider.
+Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution
+will appear to have no resources or metadata.  This delegation approach is used
+so that supporting custom importers or new distribution formats can be done
+simply by creating an appropriate `IResourceProvider`_ implementation; see the
+section below on `Supporting Custom Importers`_ for more details.
+
+
+``ResourceManager`` API
+=======================
+
+The ``ResourceManager`` class provides uniform access to package resources,
+whether those resources exist as files and directories or are compressed in
+an archive of some kind.
+
+Normally, you do not need to create or explicitly manage ``ResourceManager``
+instances, as the ``pkg_resources`` module creates a global instance for you,
+and makes most of its methods available as top-level names in the
+``pkg_resources`` module namespace.  So, for example, this code actually
+calls the ``resource_string()`` method of the global ``ResourceManager``::
+
+    import pkg_resources
+    my_data = pkg_resources.resource_string(__name__, "foo.dat")
+
+Thus, you can use the APIs below without needing an explicit
+``ResourceManager`` instance; just import and use them as needed.
+
+
+Basic Resource Access
+---------------------
+
+In the following methods, the `package_or_requirement` argument may be either
+a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
+If it is a package or module name, the named module or package must be
+importable (i.e., be in a distribution or directory on ``sys.path``), and the
+`resource_name` argument is interpreted relative to the named package.  (Note
+that if a module name is used, then the resource name is relative to the
+package immediately containing the named module.  Also, you should not use
+a namespace package name, because a namespace package can be spread across
+multiple distributions, and is therefore ambiguous as to which distribution
+should be searched for the resource.)
+
+If it is a ``Requirement``, then the requirement is automatically resolved
+(searching the current ``Environment`` if necessary) and a matching
+distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
+already present.  (Unless the ``Requirement`` can't be satisfied, in which
+case an exception is raised.)  The `resource_name` argument is then interpreted
+relative to the root of the identified distribution; i.e. its first path
+segment will be treated as a peer of the top-level modules or packages in the
+distribution.
+
+Note that resource names must be ``/``-separated paths and cannot be absolute
+(i.e. no leading ``/``) or contain relative names like ``".."``.  Do *not* use
+``os.path`` routines to manipulate resource paths, as they are *not* filesystem
+paths.
+
+``resource_exists(package_or_requirement, resource_name)``
+    Does the named resource exist?  Return ``True`` or ``False`` accordingly.
+
+``resource_stream(package_or_requirement, resource_name)``
+    Return a readable file-like object for the specified resource; it may be
+    an actual file, a ``StringIO``, or some similar object.  The stream is
+    in "binary mode", in the sense that whatever bytes are in the resource
+    will be read as-is.
+
+``resource_string(package_or_requirement, resource_name)``
+    Return the specified resource as a string.  The resource is read in
+    binary fashion, such that the returned string contains exactly the bytes
+    that are stored in the resource.
+
+``resource_isdir(package_or_requirement, resource_name)``
+    Is the named resource a directory?  Return ``True`` or ``False``
+    accordingly.
+
+``resource_listdir(package_or_requirement, resource_name)``
+    List the contents of the named resource directory, just like ``os.listdir``
+    except that it works even if the resource is in a zipfile.
+
+Note that only ``resource_exists()`` and ``resource_isdir()`` are insensitive
+to the resource type.  You cannot use ``resource_listdir()`` on a file
+resource, and you can't use ``resource_string()`` or ``resource_stream()`` on
+directory resources.  Using an inappropriate method for the resource type may
+result in an exception or undefined behavior, depending on the platform and
+distribution format involved.
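+
+For example, code in a package might read one of its own data files like this
+(``data/config.ini`` is a hypothetical resource name)::
+
+    import pkg_resources
+
+    if pkg_resources.resource_exists(__name__, "data/config.ini"):
+        config_bytes = pkg_resources.resource_string(__name__, "data/config.ini")
+
+    if pkg_resources.resource_isdir(__name__, "data"):
+        names = pkg_resources.resource_listdir(__name__, "data")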
+
+
+Resource Extraction
+-------------------
+
+``resource_filename(package_or_requirement, resource_name)``
+    Sometimes, it is not sufficient to access a resource in string or stream
+    form, and a true filesystem filename is needed.  In such cases, you can
+    use this method (or module-level function) to obtain a filename for a
+    resource.  If the resource is in an archive distribution (such as a zipped
+    egg), it will be extracted to a cache directory, and the filename within
+    the cache will be returned.  If the named resource is a directory, then
+    all resources within that directory (including subdirectories) are also
+    extracted.  If the named resource is a C extension or "eager resource"
+    (see the ``setuptools`` documentation for details), then all C extensions
+    and eager resources are extracted at the same time.
+
+    Archived resources are extracted to a cache location that can be managed by
+    the following two methods:
+
+``set_extraction_path(path)``
+    Set the base path where resources will be extracted to, if needed.
+
+    If you do not call this routine before any extractions take place, the
+    path defaults to the return value of ``get_default_cache()``, which is
+    based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+    platform-specific fallbacks.  See that routine's documentation for more
+    details.
+
+    Resources are extracted to subdirectories of this path based upon
+    information given by the resource provider.  You may set this to a
+    temporary directory, but then you must call ``cleanup_resources()`` to
+    delete the extracted files when done.  There is no guarantee that
+    ``cleanup_resources()`` will be able to remove all extracted files.  (On
+    Windows, for example, you can't unlink .pyd or .dll files that are still
+    in use.)
+
+    Note that you may not change the extraction path for a given resource
+    manager once resources have been extracted, unless you first call
+    ``cleanup_resources()``.
+
+``cleanup_resources(force=False)``
+    Delete all extracted resource files and directories, returning a list
+    of the file and directory names that could not be successfully removed.
+    This function does not have any concurrency protection, so it should
+    generally only be called when the extraction path is a temporary
+    directory exclusive to a single process.  This method is not
+    automatically called; you must call it explicitly or register it as an
+    ``atexit`` function if you wish to ensure cleanup of a temporary
+    directory used for extractions.
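+
+A minimal sketch of using a temporary extraction directory (the package and
+resource names are assumptions for illustration)::
+
+    import atexit
+    import tempfile
+    import pkg_resources
+
+    pkg_resources.set_extraction_path(tempfile.mkdtemp())
+    atexit.register(pkg_resources.cleanup_resources)
+
+    # Extracted to the cache only if the package lives in a zipped egg:
+    path = pkg_resources.resource_filename("some.package", "data/big.bin")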
+
+
+"Provider" Interface
+--------------------
+
+If you are implementing an ``IResourceProvider`` and/or ``IMetadataProvider``
+for a new distribution archive format, you may need to use the following
+``IResourceManager`` methods to co-ordinate extraction of resources to the
+filesystem.  If you're not implementing an archive format, however, you have
+no need to use these methods.  Unlike the other methods listed above, they are
+*not* available as top-level functions tied to the global ``ResourceManager``;
+you must therefore have an explicit ``ResourceManager`` instance to use them.
+
+``get_cache_path(archive_name, names=())``
+    Return absolute location in cache for `archive_name` and `names`
+
+    The parent directory of the resulting path will be created if it does
+    not already exist.  `archive_name` should be the base filename of the
+    enclosing egg (which may not be the name of the enclosing zipfile!),
+    including its ".egg" extension.  `names`, if provided, should be a
+    sequence of path name parts "under" the egg's extraction location.
+
+    This method should only be called by resource providers that need to
+    obtain an extraction location, and only for names they intend to
+    extract, as it tracks the generated names for possible cleanup later.
+
+``extraction_error()``
+    Raise an ``ExtractionError`` describing the active exception as interfering
+    with the extraction process.  You should call this if you encounter any
+    OS errors extracting the file to the cache path; it will format the
+    operating system exception for you, and add other information to the
+    ``ExtractionError`` instance that may be needed by programs that want to
+    wrap or handle extraction errors themselves.
+
+``postprocess(tempname, filename)``
+    Perform any platform-specific postprocessing of `tempname`.
+    Resource providers should call this method ONLY after successfully
+    extracting a compressed resource.  They must NOT call it on resources
+    that are already in the filesystem.
+
+    `tempname` is the current (temporary) name of the file, and `filename`
+    is the name it will be renamed to by the caller after this routine
+    returns.
+
+
+Metadata API
+============
+
+The metadata API is used to access metadata resources bundled in a pluggable
+distribution.  Metadata resources are virtual files or directories containing
+information about the distribution, such as might be used by an extensible
+application or framework to connect "plugins".  Like other kinds of resources,
+metadata resource names are ``/``-separated and should not contain ``..`` or
+begin with a ``/``.  You should not use ``os.path`` routines to manipulate
+resource paths.
+
+The metadata API is provided by objects implementing the ``IMetadataProvider``
+or ``IResourceProvider`` interfaces.  ``Distribution`` objects implement this
+interface, as do objects returned by the ``get_provider()`` function:
+
+``get_provider(package_or_requirement)``
+    If a package name is supplied, return an ``IResourceProvider`` for the
+    package.  If a ``Requirement`` is supplied, resolve it by returning a
+    ``Distribution`` from the current working set (searching the current
+    ``Environment`` if necessary and adding the newly found ``Distribution``
+    to the working set).  If the named package can't be imported, or the
+    ``Requirement`` can't be satisfied, an exception is raised.
+
+    NOTE: if you use a package name rather than a ``Requirement``, the object
+    you get back may not be a pluggable distribution, depending on the method
+    by which the package was installed.  In particular, "development" packages
+    and "single-version externally-managed" packages do not have any way to
+    map from a package name to the corresponding project's metadata.  Do not
+    write code that passes a package name to ``get_provider()`` and then tries
+    to retrieve project metadata from the returned object.  It may appear to
+    work when the named package is in an ``.egg`` file or directory, but
+    it will fail in other installation scenarios.  If you want project
+    metadata, you need to ask for a *project*, not a package.
+
+
+``IMetadataProvider`` Methods
+-----------------------------
+
+The methods provided by objects (such as ``Distribution`` instances) that
+implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are:
+
+``has_metadata(name)``
+    Does the named metadata resource exist?
+
+``metadata_isdir(name)``
+    Is the named metadata resource a directory?
+
+``metadata_listdir(name)``
+    List of metadata names in the directory (like ``os.listdir()``)
+
+``get_metadata(name)``
+    Return the named metadata resource as a string.  The data is read in binary
+    mode; i.e., the exact bytes of the resource file are returned.
+
+``get_metadata_lines(name)``
+    Yield named metadata resource as list of non-blank non-comment lines.  This
+    is short for calling ``yield_lines(provider.get_metadata(name))``.  See the
+    section on `yield_lines()`_ below for more information on the syntax it
+    recognizes.
+
+``run_script(script_name, namespace)``
+    Execute the named script in the supplied namespace dictionary.  Raises
+    ``ResolutionError`` if there is no script by that name in the ``scripts``
+    metadata directory.  `namespace` should be a Python dictionary, usually
+    a module dictionary if the script is being run as a module.
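+
+For example, a sketch of reading metadata from an installed project (the
+project name is just an example)::
+
+    import pkg_resources
+
+    dist = pkg_resources.get_distribution("distribute")
+    if dist.has_metadata("PKG-INFO"):
+        pkg_info = dist.get_metadata("PKG-INFO")
+    if dist.has_metadata("top_level.txt"):
+        top_level = list(dist.get_metadata_lines("top_level.txt"))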
+
+
+Exceptions
+==========
+
+``pkg_resources`` provides a simple exception hierarchy for problems that may
+occur when processing requests to locate and activate packages::
+
+    ResolutionError
+        DistributionNotFound
+        VersionConflict
+        UnknownExtra
+
+    ExtractionError
+
+``ResolutionError``
+    This class is used as a base class for the other three exceptions, so that
+    you can catch all of them with a single "except" clause.  It is also raised
+    directly for miscellaneous requirement-resolution problems like trying to
+    run a script that doesn't exist in the distribution it was requested from.
+
+``DistributionNotFound``
+    A distribution needed to fulfill a requirement could not be found.
+
+``VersionConflict``
+    The requested version of a project conflicts with an already-activated
+    version of the same project.
+
+``UnknownExtra``
+    One of the "extras" requested was not recognized by the distribution it
+    was requested from.
+
+``ExtractionError``
+    A problem occurred extracting a resource to the Python Egg cache.  The
+    following attributes are available on instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
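+
+For example, a minimal sketch of guarding a ``require()`` call (the
+requirement string is hypothetical)::
+
+    import pkg_resources
+    from pkg_resources import DistributionNotFound, VersionConflict
+
+    try:
+        pkg_resources.require("SomeProject>=1.0")
+    except (DistributionNotFound, VersionConflict):
+        # Fall back, report the problem, or install the missing project.
+        pass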
+
+
+Supporting Custom Importers
+===========================
+
+By default, ``pkg_resources`` supports normal filesystem imports, and
+``zipimport`` importers.  If you wish to use the ``pkg_resources`` features
+with other (PEP 302-compatible) importers or module loaders, you may need to
+register various handlers and support functions using these APIs:
+
+``register_finder(importer_type, distribution_finder)``
+    Register `distribution_finder` to find distributions in ``sys.path`` items.
+    `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path``
+    item handler), and `distribution_finder` is a callable that, when passed a
+    path item, the importer instance, and an `only` flag, yields
+    ``Distribution`` instances found under that path item.  (The `only` flag,
+    if true, means the finder should yield only ``Distribution`` objects whose
+    ``location`` is equal to the path item provided.)
+
+    See the source of the ``pkg_resources.find_on_path`` function for an
+    example finder function.
+
+``register_loader_type(loader_type, provider_factory)``
+    Register `provider_factory` to make ``IResourceProvider`` objects for
+    `loader_type`.  `loader_type` is the type or class of a PEP 302
+    ``module.__loader__``, and `provider_factory` is a function that, when
+    passed a module object, returns an `IResourceProvider`_ for that module,
+    allowing it to be used with the `ResourceManager API`_.
+
+``register_namespace_handler(importer_type, namespace_handler)``
+    Register `namespace_handler` to declare namespace packages for the given
+    `importer_type`.  `importer_type` is the type or class of a PEP 302
+    "importer" (sys.path item handler), and `namespace_handler` is a callable
+    with a signature like this::
+
+        def namespace_handler(importer, path_entry, moduleName, module):
+            """Return a path_entry to use for child packages, or None."""
+
+    Namespace handlers are only called if the relevant importer object has
+    already agreed that it can handle the relevant path item.  The handler
+    should only return a subpath if the module ``__path__`` does not already
+    contain an equivalent subpath.  Otherwise, it should return None.
+
+    For an example namespace handler, see the source of the
+    ``pkg_resources.file_ns_handler`` function, which is used for both zipfile
+    importing and regular importing.
+
+
+IResourceProvider
+-----------------
+
+``IResourceProvider`` is an abstract class that documents what methods are
+required of objects returned by a `provider_factory` registered with
+``register_loader_type()``.  ``IResourceProvider`` is a subclass of
+``IMetadataProvider``, so objects that implement this interface must also
+implement all of the `IMetadataProvider Methods`_ as well as the methods
+shown here.  The `manager` argument to the methods below must be an object
+that supports the full `ResourceManager API`_ documented above.
+
+``get_resource_filename(manager, resource_name)``
+    Return a true filesystem path for `resource_name`, co-ordinating the
+    extraction with `manager`, if the resource must be unpacked to the
+    filesystem.
+
+``get_resource_stream(manager, resource_name)``
+    Return a readable file-like object for `resource_name`.
+
+``get_resource_string(manager, resource_name)``
+    Return a string containing the contents of `resource_name`.
+
+``has_resource(resource_name)``
+    Does the package contain the named resource?
+
+``resource_isdir(resource_name)``
+    Is the named resource a directory?  Return a false value if the resource
+    does not exist or is not a directory.
+
+``resource_listdir(resource_name)``
+    Return a list of the contents of the resource directory, like
+    ``os.listdir()``.  Requesting the contents of a non-existent directory may
+    raise an exception.
+
+Note, by the way, that your provider classes need not (and should not) subclass
+``IResourceProvider`` or ``IMetadataProvider``!  These classes exist solely
+for documentation purposes and do not provide any useful implementation code.
+You may instead wish to subclass one of the `built-in resource providers`_.
+
+
+Built-in Resource Providers
+---------------------------
+
+``pkg_resources`` includes several provider classes that are automatically used
+where appropriate.  Their inheritance tree looks like this::
+
+    NullProvider
+        EggProvider
+            DefaultProvider
+                PathMetadata
+            ZipProvider
+                EggMetadata
+        EmptyProvider
+            FileMetadata
+
+
+``NullProvider``
+    This provider class is just an abstract base that provides for common
+    provider behaviors (such as running scripts), given a definition for just
+    a few abstract methods.
+
+``EggProvider``
+    This provider class adds in some egg-specific features that are common
+    to zipped and unzipped eggs.
+
+``DefaultProvider``
+    This provider class is used for unpacked eggs and "plain old Python"
+    filesystem modules.
+
+``ZipProvider``
+    This provider class is used for all zipped modules, whether they are eggs
+    or not.
+
+``EmptyProvider``
+    This provider class always returns answers consistent with a provider that
+    has no metadata or resources.  ``Distribution`` objects created without
+    a ``metadata`` argument use an instance of this provider class instead.
+    Since all ``EmptyProvider`` instances are equivalent, there is no need
+    to have more than one instance.  ``pkg_resources`` therefore creates a
+    global instance of this class under the name ``empty_provider``, and you
+    may use it if you have need of an ``EmptyProvider`` instance.
+
+``PathMetadata(path, egg_info)``
+    Create an ``IResourceProvider`` for a filesystem-based distribution, where
+    `path` is the filesystem location of the importable modules, and `egg_info`
+    is the filesystem location of the distribution's metadata directory.
+    `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an
+    "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for
+    a "development egg".  However, other uses are possible for custom purposes.
+
+``EggMetadata(zipimporter)``
+    Create an ``IResourceProvider`` for a zipfile-based distribution.  The
+    `zipimporter` should be a ``zipimport.zipimporter`` instance, and may
+    represent a "basket" (a zipfile containing multiple ".egg" subdirectories)
+    a specific egg *within* a basket, or a zipfile egg (where the zipfile
+    itself is a ".egg").  It can also be a combination, such as a zipfile egg
+    that also contains other eggs.
+
+``FileMetadata(path_to_pkg_info)``
+    Create an ``IResourceProvider`` that provides exactly one metadata
+    resource: ``PKG-INFO``.  The supplied path should be a distutils PKG-INFO
+    file.  This is basically the same as an ``EmptyProvider``, except that
+    requests for ``PKG-INFO`` will be answered using the contents of the
+    designated file.  (This provider is used to wrap ``.egg-info`` files
+    installed by vendor-supplied system packages.)
+
+
+Utility Functions
+=================
+
+In addition to its high-level APIs, ``pkg_resources`` also includes several
+generally-useful utility routines.  These routines are used to implement the
+high-level APIs, but can also be quite useful by themselves.
+
+
+Parsing Utilities
+-----------------
+
+``parse_version(version)``
+    Parse a project's version string, returning a value that can be used to
+    compare versions by chronological order.  Semantically, the format is a
+    rough cross between distutils' ``StrictVersion`` and ``LooseVersion``
+    classes; if you give it versions that would work with ``StrictVersion``,
+    then they will compare the same way.  Otherwise, comparisons are more like
+    a "smarter" form of ``LooseVersion``.  It is *possible* to create
+    pathological version coding schemes that will fool this parser, but they
+    should be very rare in practice.
+
+    The returned value will be a tuple of strings.  Numeric portions of the
+    version are padded to 8 digits so they will compare numerically, but
+    without relying on how numbers compare relative to strings.  Dots are
+    dropped, but dashes are retained.  Trailing zeros between alpha segments
+    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
+    "2.4". Alphanumeric parts are lower-cased.
+
+    The algorithm assumes that strings like "-" and any alpha string that
+    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
+    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
+    considered newer than "2.4-1", which in turn is newer than "2.4".
+
+    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
+    come before "final" alphabetically) are assumed to be pre-release versions,
+    so that the version "2.4" is considered newer than "2.4a1".  Any "-"
+    characters preceding a pre-release indicator are removed.  (In versions of
+    setuptools prior to 0.6a9, "-" characters were not removed, leading to the
+    unintuitive result that "0.2-rc1" was considered a newer version than
+    "0.2".)
+
+    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
+    "rc" are treated as if they were "c", i.e. as though they were release
+    candidates, and therefore are not as new as a version string that does not
+    contain them.  And the string "dev" is treated as if it were an "@" sign;
+    that is, a version coming before even "a" or "alpha".
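+
+    A brief sketch of the resulting ordering::
+
+        from pkg_resources import parse_version
+
+        assert parse_version("2.4") == parse_version("2.4.0")
+        assert parse_version("2.4a1") < parse_version("2.4")
+        assert parse_version("2.4") < parse_version("2.4-1") < parse_version("2.4.1")
+        assert parse_version("1.0.dev1") < parse_version("1.0a1")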
+
+.. _yield_lines():
+
+``yield_lines(strs)``
+    Yield non-empty/non-comment lines from a string/unicode or a possibly-
+    nested sequence thereof.  If `strs` is an instance of ``basestring``, it
+    is split into lines, and each non-blank, non-comment line is yielded after
+    stripping leading and trailing whitespace.  (Lines whose first non-blank
+    character is ``#`` are considered comment lines.)
+
+    If `strs` is not an instance of ``basestring``, it is iterated over, and
+    each item is passed recursively to ``yield_lines()``, so that an arbitrarily
+    nested sequence of strings, or sequences of sequences of strings, can be
+    flattened out to the lines contained therein.  So, for example, passing
+    either a file object or a list of strings to ``yield_lines()`` will work.
+    (Note that between each string in a sequence of strings there is assumed to
+    be an implicit line break, so lines cannot bridge two strings in a
+    sequence.)
+
+    This routine is used extensively by ``pkg_resources`` to parse metadata
+    and file formats of various kinds, and most other ``pkg_resources``
+    parsing functions that yield multiple values will use it to break up their
+    input.  However, this routine is idempotent, so calling ``yield_lines()``
+    on the output of another call to ``yield_lines()`` is completely harmless.
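+
+    For instance::
+
+        from pkg_resources import yield_lines
+
+        text = """
+        # a comment
+        first line
+
+        second line
+        """
+        list(yield_lines(text))           # -> ['first line', 'second line']
+        list(yield_lines(["a\nb", "c"]))  # -> ['a', 'b', 'c']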
+
+``split_sections(strs)``
+    Split a string (or possibly-nested iterable thereof), yielding ``(section,
+    content)`` pairs found using an ``.ini``-like syntax.  Each ``section`` is
+    a whitespace-stripped version of the section name ("``[section]``")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any non-blank, non-comment lines before
+    the first section header, they're yielded in a first ``section`` of
+    ``None``.
+
+    This routine uses ``yield_lines()`` as its front end, so you can pass in
+    anything that ``yield_lines()`` accepts, such as an open text file, string,
+    or sequence of strings.  ``ValueError`` is raised if a malformed section
+    header is found (i.e. a line starting with ``[`` but not ending with
+    ``]``).
+
+    Note that this simplistic parser assumes that any line whose first nonblank
+    character is ``[`` is a section heading, so it can't support .ini format
+    variations that allow ``[`` as the first nonblank character on other lines.
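+
+    For example::
+
+        from pkg_resources import split_sections
+
+        ini_text = """
+        [main]
+        alpha
+        beta
+
+        [extras]
+        gamma
+        """
+        list(split_sections(ini_text))
+        # -> [('main', ['alpha', 'beta']), ('extras', ['gamma'])]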
+
+``safe_name(name)``
+    Return a "safe" form of a project's name, suitable for use in a
+    ``Requirement`` string, as a distribution name, or a PyPI project name.
+    All non-alphanumeric runs are condensed to single "-" characters, such that
+    a name like "The $$$ Tree" becomes "The-Tree".  Note that if you are
+    generating a filename from this value you should combine it with a call to
+    ``to_filename()`` so all dashes ("-") are replaced by underscores ("_").
+    See ``to_filename()``.
+
+``safe_version(version)``
+    Similar to ``safe_name()`` except that spaces in the input become dots, and
+    dots are allowed to exist in the output.  As with ``safe_name()``, if you
+    are generating a filename from this you should replace any "-" characters
+    in the output with underscores.
+
+``safe_extra(extra)``
+    Return a "safe" form of an extra's name, suitable for use in a requirement
+    string or a setup script's ``extras_require`` keyword.  This routine is
+    similar to ``safe_name()`` except that non-alphanumeric runs are replaced
+    by a single underbar (``_``), and the result is lowercased.
+
+``to_filename(name_or_version)``
+    Escape a name or version string so it can be used in a dash-separated
+    filename (or ``#egg=name-version`` tag) without ambiguity.  You
+    should only pass in values that were returned by ``safe_name()`` or
+    ``safe_version()``.
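+
+For example, a quick sketch of the documented behavior::
+
+    from pkg_resources import safe_name, safe_version, to_filename
+
+    name = safe_name("The $$$ Tree")        # -> "The-Tree"
+    version = safe_version("1.0 beta 2")    # -> "1.0.beta.2"
+    egg_base = "%s-%s" % (to_filename(name), to_filename(version))
+    # -> "The_Tree-1.0.beta.2"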
+
+
+Platform Utilities
+------------------
+
+``get_build_platform()``
+    Return this platform's identifier string.  For Windows, the return value
+    is ``"win32"``, and for Mac OS X it is a string of the form
+    ``"macosx-10.4-ppc"``.  All other platforms return the same uname-based
+    string that the ``distutils.util.get_platform()`` function returns.
+    This string is the minimum platform version required by distributions built
+    on the local machine.  (Backward compatibility note: setuptools versions
+    prior to 0.6b1 called this function ``get_platform()``, and the function is
+    still available under that name for backward compatibility reasons.)
+
+``get_supported_platform()`` (New in 0.6b1)
+    This is similar to ``get_build_platform()``, but returns the maximum
+    platform version that the local machine supports.  You will usually want
+    to use this value as the ``provided`` argument to the
+    ``compatible_platforms()`` function.
+
+``compatible_platforms(provided, required)``
+    Return true if a distribution built on the `provided` platform may be used
+    on the `required` platform.  If either platform value is ``None``, it is
+    considered a wildcard, and the platforms are therefore compatible.
+    Likewise, if the platform strings are equal, they're also considered
+    compatible, and ``True`` is returned.  Currently, the only non-equal
+    platform strings that are considered compatible are Mac OS X platform
+    strings with the same hardware type (e.g. ``ppc``) and major version
+    (e.g. ``10``) with the `provided` platform's minor version being less than
+    or equal to the `required` platform's minor version.
+
+``get_default_cache()``
+    Determine the default cache location for extracting resources from zipped
+    eggs.  This routine returns the ``PYTHON_EGG_CACHE`` environment variable,
+    if set.  Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of
+    the user's "Application Data" directory; on all other systems, it returns
+    ``os.path.expanduser("~/.python-eggs")``.
+
+
+PEP 302 Utilities
+-----------------
+
+``get_importer(path_item)``
+    Retrieve a PEP 302 "importer" for the given path item (which need not
+    actually be on ``sys.path``).  This routine simulates the PEP 302 protocol
+    for obtaining an "importer" object.  It first checks for an importer for
+    the path item in ``sys.path_importer_cache``, and if not found it calls
+    each of the ``sys.path_hooks`` and caches the result if a good importer is
+    found.  If no importer is found, this routine returns an ``ImpWrapper``
+    instance that wraps the builtin import machinery as a PEP 302-compliant
+    "importer" object.  This ``ImpWrapper`` is *not* cached; instead a new
+    instance is returned each time.
+
+    (Note: When run under Python 2.5, this function is simply an alias for
+    ``pkgutil.get_importer()``, and instead of ``pkg_resources.ImpWrapper``
+    instances, it may return ``pkgutil.ImpImporter`` instances.)
+
+
+File/Path Utilities
+-------------------
+
+``ensure_directory(path)``
+    Ensure that the parent directory (``os.path.dirname``) of `path` actually
+    exists, using ``os.makedirs()`` if necessary.
+
+``normalize_path(path)``
+    Return a "normalized" version of `path`, such that two paths represent
+    the same filesystem location if they have equal ``normalize_path()``
+    values.  Specifically, this is a shortcut for calling ``os.path.realpath``
+    and ``os.path.normcase`` on `path`.  Unfortunately, on certain platforms
+    (notably Cygwin and Mac OS X) the ``normcase`` function does not accurately
+    reflect the platform's case-sensitivity, so there is always the possibility
+    of two apparently-different paths being equal on such platforms.
+
+History
+-------
+
+0.6c9
+ * Fix ``resource_listdir('')`` always returning an empty list for zipped eggs.
+
+0.6c7
+ * Fix package precedence problem where single-version eggs installed in
+   ``site-packages`` would take precedence over ``.egg`` files (or directories)
+   installed in ``site-packages``.
+
+0.6c6
+ * Fix extracted C extensions not having executable permissions under Cygwin.
+
+ * Allow ``.egg-link`` files to contain relative paths.
+
+ * Fix cache dir defaults on Windows when multiple environment vars are needed
+   to construct a path.
+
+0.6c4
+ * Fix "dev" versions being considered newer than release candidates.
+
+0.6c3
+ * Python 2.5 compatibility fixes.
+
+0.6c2
+ * Fix a problem with eggs specified directly on ``PYTHONPATH`` on
+   case-insensitive filesystems possibly not showing up in the default
+   working set, due to differing normalizations of ``sys.path`` entries.
+
+0.6b3
+ * Fixed a duplicate path insertion problem on case-insensitive filesystems.
+
+0.6b1
+ * Split ``get_platform()`` into ``get_supported_platform()`` and
+   ``get_build_platform()`` to work around a Mac versioning problem that caused
+   the behavior of ``compatible_platforms()`` to be platform specific.
+
+ * Fix entry point parsing when a standalone module name has whitespace
+   between it and the extras.
+
+0.6a11
+ * Added ``ExtractionError`` and ``ResourceManager.extraction_error()`` so that
+   cache permission problems get a more user-friendly explanation of the
+   problem, and so that programs can catch and handle extraction errors if they
+   need to.
+
+0.6a10
+ * Added the ``extras`` attribute to ``Distribution``, the ``find_plugins()``
+   method to ``WorkingSet``, and the ``__add__()`` and ``__iadd__()`` methods
+   to ``Environment``.
+
+ * ``safe_name()`` now allows dots in project names.
+
+ * There is a new ``to_filename()`` function that escapes project names and
+   versions for safe use in constructing egg filenames from a Distribution
+   object's metadata.
+
+ * Added ``Distribution.clone()`` method, and keyword argument support to other
+   ``Distribution`` constructors.
+
+ * Added the ``DEVELOP_DIST`` precedence, and automatically assign it to
+   eggs using ``.egg-info`` format.
+
+0.6a9
+ * Don't raise an error when an invalid (unfinished) distribution is found
+   unless absolutely necessary.  Warn about skipping invalid/unfinished eggs
+   when building an Environment.
+
+ * Added support for ``.egg-info`` files or directories with version/platform
+   information embedded in the filename, so that system packagers have the
+   option of including ``PKG-INFO`` files to indicate the presence of a
+   system-installed egg, without needing to use ``.egg`` directories, zipfiles,
+   or ``.pth`` manipulation.
+
+ * Changed ``parse_version()`` to remove dashes before pre-release tags, so
+   that ``0.2-rc1`` is considered an *older* version than ``0.2``, and is equal
+   to ``0.2rc1``.  The idea that a dash *always* meant a post-release version
+   was highly non-intuitive to setuptools users and Python developers, who
+   seem to want to use ``-rc`` version numbers a lot.
+
+0.6a8
+ * Fixed a problem with ``WorkingSet.resolve()`` that prevented version
+   conflicts from being detected at runtime.
+
+ * Improved runtime conflict warning message to identify a line in the user's
+   program, rather than flagging the ``warn()`` call in ``pkg_resources``.
+
+ * Avoid giving runtime conflict warnings for namespace packages, even if they
+   were declared by a different package than the one currently being activated.
+
+ * Fix path insertion algorithm for case-insensitive filesystems.
+
+ * Fixed a problem with nested namespace packages (e.g. ``peak.util``) not
+   being set as an attribute of their parent package.
+
+0.6a6
+ * Activated distributions are now inserted in ``sys.path`` (and the working
+   set) just before the directory that contains them, instead of at the end.
+   This allows e.g. eggs in ``site-packages`` to override unmanaged modules in
+   the same location, and allows eggs found earlier on ``sys.path`` to override
+   ones found later.
+
+ * When a distribution is activated, it now checks whether any contained
+   non-namespace modules have already been imported and issues a warning if
+   a conflicting module has already been imported.
+
+ * Changed dependency processing so that it's breadth-first, allowing a
+   depender's preferences to override those of a dependee, to prevent conflicts
+   when a lower version is acceptable to the dependee, but not the depender.
+
+ * Fixed a problem extracting zipped files on Windows, when the egg in question
+   has had changed contents but still has the same version number.
+
+0.6a4
+ * Fix a bug in ``WorkingSet.resolve()`` that was introduced in 0.6a3.
+
+0.6a3
+ * Added ``safe_extra()`` parsing utility routine, and use it for Requirement,
+   EntryPoint, and Distribution objects' extras handling.
+
+0.6a1
+ * Enhanced performance of ``require()`` and related operations when all
+   requirements are already in the working set, and enhanced performance of
+   directory scanning for distributions.
+
+ * Fixed some problems using ``pkg_resources`` w/PEP 302 loaders other than
+   ``zipimport``, and the previously-broken "eager resource" support.
+
+ * Fixed ``pkg_resources.resource_exists()`` not working correctly, along with
+   some other resource API bugs.
+
+ * Many API changes and enhancements:
+
+   * Added ``EntryPoint``, ``get_entry_map``, ``load_entry_point``, and
+     ``get_entry_info`` APIs for dynamic plugin discovery.
+
+   * ``list_resources`` is now ``resource_listdir`` (and it actually works)
+
+   * Resource API functions like ``resource_string()`` that accepted a package
+     name and resource name, will now also accept a ``Requirement`` object in
+     place of the package name (to allow access to non-package data files in
+     an egg).
+
+   * ``get_provider()`` will now accept a ``Requirement`` instance or a module
+     name.  If it is given a ``Requirement``, it will return a corresponding
+     ``Distribution`` (by calling ``require()`` if a suitable distribution
+     isn't already in the working set), rather than returning a metadata and
+     resource provider for a specific module.  (The difference is in how
+     resource paths are interpreted; supplying a module name means resource
+     paths will be module-relative, rather than relative to the distribution's
+     root.)
+
+   * ``Distribution`` objects now implement the ``IResourceProvider`` and
+     ``IMetadataProvider`` interfaces, so you don't need to reference the (no
+     longer available) ``metadata`` attribute to get at these interfaces.
+
+   * ``Distribution`` and ``Requirement`` both have a ``project_name``
+     attribute for the project name they refer to.  (Previously these were
+     ``name`` and ``distname`` attributes.)
+
+   * The ``path`` attribute of ``Distribution`` objects is now ``location``,
+     because it isn't necessarily a filesystem path (and hasn't been for some
+     time now).  The ``location`` of ``Distribution`` objects in the filesystem
+     should always be normalized using ``pkg_resources.normalize_path()``; all
+     of the setuptools and EasyInstall code that generates distributions from
+     the filesystem (including ``Distribution.from_filename()``) ensure this
+     invariant, but if you use a more generic API like ``Distribution()`` or
+     ``Distribution.from_location()`` you should take care that you don't
+     create a distribution with an un-normalized filesystem path.
+
+   * ``Distribution`` objects now have an ``as_requirement()`` method that
+     returns a ``Requirement`` for the distribution's project name and version.
+
+   * Distribution objects no longer have an ``installed_on()`` method, and the
+     ``install_on()`` method is now ``activate()`` (but may go away altogether
+     soon).  The ``depends()`` method has also been renamed to ``requires()``,
+     and ``InvalidOption`` is now ``UnknownExtra``.
+
+   * ``find_distributions()`` now takes an additional argument called ``only``,
+     that tells it to only yield distributions whose location is the passed-in
+     path.  (It defaults to False, so that the default behavior is unchanged.)
+
+   * ``AvailableDistributions`` is now called ``Environment``, and the
+     ``get()``, ``__len__()``, and ``__contains__()`` methods were removed,
+     because they weren't particularly useful.  ``__getitem__()`` no longer
+     raises ``KeyError``; it just returns an empty list if there are no
+     distributions for the named project.
+
+   * The ``resolve()`` method of ``Environment`` is now a method of
+     ``WorkingSet`` instead, and the ``best_match()`` method now uses a working
+     set instead of a path list as its second argument.
+
+   * There is a new ``pkg_resources.add_activation_listener()`` API that lets
+     you register a callback for notifications about distributions added to
+     ``sys.path`` (including the distributions already on it).  This is
+     basically a hook for extensible applications and frameworks to be able to
+     search for plugin metadata in distributions added at runtime.
+
+0.5a13
+ * Fixed a bug in resource extraction from nested packages in a zipped egg.
+
+0.5a12
+ * Updated extraction/cache mechanism for zipped resources to avoid inter-
+   process and inter-thread races during extraction.  The default cache
+   location can now be set via the ``PYTHON_EGG_CACHE`` environment variable,
+   and the default Windows cache is now a ``Python-Eggs`` subdirectory of the
+   current user's "Application Data" directory, if the ``PYTHON_EGG_CACHE``
+   variable isn't set.
+
+0.5a10
+ * Fix a problem with ``pkg_resources`` being confused by non-existent eggs on
+   ``sys.path`` (e.g. if a user deletes an egg without removing it from the
+   ``easy-install.pth`` file).
+
+ * Fix a problem with "basket" support in ``pkg_resources``, where egg-finding
+   never actually went inside ``.egg`` files.
+
+ * Made ``pkg_resources`` import the module you request resources from, if it's
+   not already imported.
+
+0.5a4
+ * ``pkg_resources.AvailableDistributions.resolve()`` and related methods now
+   accept an ``installer`` argument: a callable taking one argument, a
+   ``Requirement`` instance.  The callable must return a ``Distribution``
+   object, or ``None`` if no distribution is found.  This feature is used by
+   EasyInstall to resolve dependencies by recursively invoking itself.
+
+0.4a4
+ * Fix problems with ``resource_listdir()``, ``resource_isdir()`` and resource
+   directory extraction for zipped eggs.
+
+0.4a3
+ * Fixed scripts not being able to see a ``__file__`` variable in ``__main__``
+
+ * Fixed a problem with ``resource_isdir()`` implementation that was introduced
+   in 0.4a2.
+
+0.4a1
+ * Fixed a bug in requirements processing for exact versions (i.e. ``==`` and
+   ``!=``) when only one condition was included.
+
+ * Added ``safe_name()`` and ``safe_version()`` APIs to clean up handling of
+   arbitrary distribution names and versions found on PyPI.
+
+0.3a4
+ * ``pkg_resources`` now supports resource directories, not just the resources
+   in them.  In particular, there are ``resource_listdir()`` and
+   ``resource_isdir()`` APIs.
+
+ * ``pkg_resources`` now supports "egg baskets" -- .egg zipfiles which contain
+   multiple distributions in subdirectories whose names end with ``.egg``.
+   Having such a "basket" in a directory on ``sys.path`` is equivalent to
+   having the individual eggs in that directory, but the contained eggs can
+   be individually added (or not) to ``sys.path``.  Currently, however, there
+   is no automated way to create baskets.
+
+ * Namespace package manipulation is now protected by the Python import lock.
+
+0.3a1
+ * Initial release.
+
diff --git a/vendor/distribute-0.6.35/docs/python3.txt b/vendor/distribute-0.6.35/docs/python3.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2f6cde4ab35ec9ddfd3d551310cac2586091ab46
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/python3.txt
@@ -0,0 +1,121 @@
+=====================================================
+Supporting both Python 2 and Python 3 with Distribute
+=====================================================
+
+Starting with version 0.6.2, Distribute supports Python 3. Installing and
+using Distribute for Python 3 code works exactly the same as for Python 2
+code, but Distribute also helps you to support Python 2 and Python 3 from
+the same source code by letting you run 2to3 on the code as a part of the
+build process, by setting the keyword parameter ``use_2to3`` to True.
+
+
+Distribute as help during porting
+=================================
+
+Distribute can make the porting process much easier by automatically running
+2to3 as part of running the tests. To do this you need to configure the
+setup.py so that you can run the unit tests with ``python setup.py test``.
+
+See :ref:`test` for more information on this.
+
+Once you have the tests running under Python 2, you can add the ``use_2to3``
+keyword parameter to setup(), and start running the tests under Python 3.
+The test command will now first run the build command during which the code
+will be converted with 2to3, and the tests will then be run from the build
+directory, as opposed to the source directory as is normally done.
+
+Distribute will convert all Python files, and also all doctests in Python
+files. However, if you have doctests located in separate text files, these
+will not automatically be converted. By adding them to the
+``convert_2to3_doctests`` keyword parameter, Distribute will convert them as
+well.
+
+By default, the conversion uses all fixers in the ``lib2to3.fixes`` package.
+To use additional fixers, the parameter ``use_2to3_fixers`` can be set
+to a list of names of packages containing fixers. To exclude fixers, the
+parameter ``use_2to3_exclude_fixers`` can be set to fixer names to be
+skipped.
+
+A typical setup.py can look something like this::
+
+    from setuptools import setup
+
+    setup(
+        name='your.module',
+        version = '1.0',
+        description='This is your awesome module',
+        author='You',
+        author_email='your@email',
+        package_dir = {'': 'src'},
+        packages = ['your', 'your.module'],
+        test_suite = 'your.module.tests',
+        use_2to3 = True,
+        convert_2to3_doctests = ['src/your/module/README.txt'],
+        use_2to3_fixers = ['your.fixers'],
+        use_2to3_exclude_fixers = ['lib2to3.fixes.fix_import'],
+    )
+
+Differential conversion
+-----------------------
+
+Note that a file will only be copied and converted during the build process
+if the source file has been changed. If you add a file to the doctests
+that should be converted, it will not be converted the next time you run
+the tests, since it hasn't been modified. You need to remove it from the
+build directory. Also, if you run the build, install or test commands before
+adding the ``use_2to3`` parameter, you will have to remove the build directory
+before you run the test command, as the files will otherwise seem up to date,
+and no conversion will happen.
+
+In general, if code doesn't seem to be converted, deleting the build directory
+and trying again is a good safeguard against the build directory getting
+"out of sync" with the source directory.
+
+Distributing Python 3 modules
+=============================
+
+You can distribute your modules with Python 3 support in different ways. A
+normal source distribution will work, but can be slow to install, as the
+2to3 process will be run during the install. But you can also distribute
+the module in binary format, such as a binary egg. That egg will contain the
+already converted code, and hence no 2to3 conversion is needed during install.
+
+Advanced features
+=================
+
+If you don't want to run the 2to3 conversion on the doctests in Python files,
+you can turn that off by setting ``setuptools.use_2to3_on_doctests = False``.
+
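+For instance, here is a minimal sketch of a setup.py that disables doctest
+conversion, assuming the module-level flag behaves as described above (the
+project metadata is only a placeholder)::
+
+    import setuptools
+    from setuptools import setup
+
+    # Assumption: this flag is honored as documented above.
+    setuptools.use_2to3_on_doctests = False
+
+    setup(
+        name='your.module',
+        version='1.0',
+        use_2to3=True,
+    )
+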
+Note on compatibility with setuptools
+=====================================
+
+Setuptools does not know about the new keyword parameters that support Python 3.
+As a result it will warn about the unknown keyword parameters if you use
+setuptools instead of Distribute under Python 2. This is not an error, and the
+install process will continue as normal, but if you want to get rid of that
+warning this is easy. Simply add the new parameters conditionally to an extra
+dict and pass that dict into setup()::
+
+    from setuptools import setup
+    import sys
+
+    extra = {}
+    if sys.version_info >= (3,):
+        extra['use_2to3'] = True
+        extra['convert_2to3_doctests'] = ['src/your/module/README.txt']
+        extra['use_2to3_fixers'] = ['your.fixers']
+
+    setup(
+        name='your.module',
+        version = '1.0',
+        description='This is your awesome module',
+        author='You',
+        author_email='your@email',
+        package_dir = {'': 'src'},
+        packages = ['your', 'your.module'],
+        test_suite = 'your.module.tests',
+        **extra
+    )
+
+This way the parameters will only be used under Python 3, where you have to
+use Distribute.
diff --git a/vendor/distribute-0.6.35/docs/roadmap.txt b/vendor/distribute-0.6.35/docs/roadmap.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ea5070eaaf8797a273928df2b3791ac669920cdb
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/roadmap.txt
@@ -0,0 +1,86 @@
+=======
+Roadmap
+=======
+
+Distribute has two branches:
+
+- 0.6.x: provides a Setuptools-0.6cX compatible version
+- 0.7.x: will provide a refactoring
+
+0.6.x
+=====
+
+Not "much" is going to happen here, we want this branch to be helpful
+to the community *today* by addressing the 40-or-so bugs
+that were found in Setuptools and never fixed. This is eventually
+happen soon because its development is
+fast : there are up to 5 commiters that are working on it very often
+(and the number grows weekly.)
+
+The biggest issue with this branch is that it provides the same
+packages and modules setuptools does, and this
+requires some bootstrapping work where we make sure that, once Distribute is
+installed, all distributions that require Setuptools
+will continue to work. This is done by faking the metadata of
+Setuptools 0.6c9. That's the only way we found to do this.
+
+There's one major thing though: thanks to the work of Lennart, Alex,
+and Martin, this branch supports Python 3,
+which is great for speeding up Py3 adoption.
+
+The goal of the 0.6.x branch is to remove as many bugs as we can, and try, if
+possible, to remove the patches done
+on Distutils. We will support 0.6.x maintenance for years and we will
+promote its usage everywhere instead of
+Setuptools.
+
+Some new commands are added there, when they are helpful and don't
+interact with the rest. I am thinking
+about "upload_docs" that let you upload documentation to PyPI. The
+goal is to move it to Distutils
+at some point, if the documentation feature of PyPI stays and starts to be used.
+
+0.7.x
+=====
+
+We've started to refactor Distribute with this roadmap in mind (and
+no, as someone said, it's not vaporware;
+we've done a lot already):
+
+- 0.7.x can be installed and used with 0.6.x
+
+- easy_install is going to be deprecated! Use Pip!
+
+- the version system will be deprecated, in favor of the one in Distutils
+
+- no more Distutils monkey-patch that happens once you use the code
+  (things like 'from distutils import cmd; cmd.Command = CustomCommand')
+
+- no more custom site.py (that is: if something is missing in Python's
+  site.py we'll add it there instead of patching it)
+
+- no more namespaced packages system, if PEP 382 (namespace package
+  support) makes it to 2.7
+
+- The code is split into many packages and might be distributed as
+  several distributions.
+
+ - distribute.resources: that's the old pkg_resources, but
+   reorganized into clean, PEP 8 modules. This package will
+   only contain the query APIs and will focus on being PEP 376
+   compatible. We will promote its usage and see if Pip wants
+   to use it as a basis.
+   It will probably shrink a lot though, once the stdlib provides PEP 376 support.
+
+ - distribute.entrypoints: that's the old pkg_resources entry points
+   system, but on its own. It uses distribute.resources.
+
+ - distribute.index: that's package_index and a few other things.
+   Everything required to interact with PyPI. We will promote
+   its usage and see if Pip wants to use it as a basis.
+
+ - distribute.core (might be renamed to main): that's everything
+   else, and uses the other packages.
+
+Goal: A first release before (or when) Python 2.7 / 3.2 is out.
+
diff --git a/vendor/distribute-0.6.35/docs/setuptools.txt b/vendor/distribute-0.6.35/docs/setuptools.txt
new file mode 100644
index 0000000000000000000000000000000000000000..fe8bb3f6155318a86e20cf1a67b12b63c20897fc
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/setuptools.txt
@@ -0,0 +1,3230 @@
+==================================================
+Building and Distributing Packages with Distribute
+==================================================
+
+``Distribute`` is a collection of enhancements to the Python ``distutils``
+(for Python 2.3.5 and up on most platforms; 64-bit platforms require a minimum
+of Python 2.4) that allow you to more easily build and distribute Python
+packages, especially ones that have dependencies on other packages.
+
+Packages built and distributed using ``setuptools`` look to the user like
+ordinary Python packages based on the ``distutils``.  Your users don't need to
+install or even know about setuptools in order to use them, and you don't
+have to include the entire setuptools package in your distributions.  By
+including just a single `bootstrap module`_ (an 8K .py file), your package will
+automatically download and install ``setuptools`` if the user is building your
+package from source and doesn't have a suitable version already installed.
+
+.. _bootstrap module: http://nightly.ziade.org/distribute_setup.py
+
+Feature Highlights:
+
+* Automatically find/download/install/upgrade dependencies at build time using
+  the `EasyInstall tool <http://peak.telecommunity.com/DevCenter/EasyInstall>`_,
+  which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
+  automatically scans web pages linked from PyPI to find download links.  (It's
+  the closest thing to CPAN currently available for Python.)
+
+* Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
+  a single-file importable distribution format
+
+* Include data files inside your package directories, where your code can
+  actually use them.  (Python 2.4 distutils also supports this feature, but
+  setuptools provides the feature for Python 2.3 packages also, and supports
+  accessing data files in zipped packages too.)
+
+* Automatically include all packages in your source tree, without listing them
+  individually in setup.py
+
+* Automatically include all relevant files in your source distributions,
+  without needing to create a ``MANIFEST.in`` file, and without having to force
+  regeneration of the ``MANIFEST`` file when your source tree changes.
+
+* Automatically generate wrapper scripts or Windows (console and GUI) .exe
+  files for any number of "main" functions in your project.  (Note: this is not
+  a py2exe replacement; the .exe files rely on the local Python installation.)
+
+* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
+  still work even when the end-user doesn't have Pyrex installed (as long as
+  you include the Pyrex-generated C in your source distribution)
+
+* Command aliases - create project-specific, per-user, or site-wide shortcut
+  names for commonly used commands and options
+
+* PyPI upload support - upload your source distributions and eggs to PyPI
+
+* Deploy your project in "development mode", such that it's available on
+  ``sys.path``, yet can still be edited directly from its source checkout.
+
+* Easily extend the distutils with new commands or ``setup()`` arguments, and
+  distribute/reuse your extensions for multiple projects, without copying code.
+
+* Create extensible applications and frameworks that automatically discover
+  extensions, using simple "entry points" declared in a project's setup script.
+
+In addition to the PyPI downloads, the development version of ``setuptools``
+is available from the `Python SVN sandbox`_, and in-development versions of the
+`0.6 branch`_ are available as well.
+
+.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+
+.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+
+.. contents:: **Table of Contents**
+
+.. _distribute_setup.py: `bootstrap module`_
+
+
+-----------------
+Developer's Guide
+-----------------
+
+
+Installing ``setuptools``
+=========================
+
+Please follow the `EasyInstall Installation Instructions`_ to install the
+current stable version of setuptools.  In particular, be sure to read the
+section on `Custom Installation Locations`_ if you are installing anywhere
+other than Python's ``site-packages`` directory.
+
+.. _EasyInstall Installation Instructions: http://peak.telecommunity.com/DevCenter/EasyInstall#installation-instructions
+
+.. _Custom Installation Locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
+
+If you want the current in-development version of setuptools, you should first
+install a stable version, and then run::
+
+    distribute_setup.py setuptools==dev
+
+This will download and install the latest development (i.e. unstable) version
+of setuptools from the Python Subversion sandbox.
+
+
+Basic Use
+=========
+
+For basic use of setuptools, just import things from setuptools instead of
+the distutils.  Here's a minimal setup script using setuptools::
+
+    from setuptools import setup, find_packages
+    setup(
+        name = "HelloWorld",
+        version = "0.1",
+        packages = find_packages(),
+    )
+
+As you can see, it doesn't take much to use setuptools in a project.
+Just by doing the above, this project will be able to produce eggs, upload to
+PyPI, and automatically include all packages in the directory where the
+setup.py lives.  See the `Command Reference`_ section below to see what
+commands you can give to this setup script.
+
+Of course, before you release your project to PyPI, you'll want to add a bit
+more information to your setup script to help people find or learn about your
+project.  And maybe your project will have grown by then to include a few
+dependencies, and perhaps some data files and scripts::
+
+    from setuptools import setup, find_packages
+    setup(
+        name = "HelloWorld",
+        version = "0.1",
+        packages = find_packages(),
+        scripts = ['say_hello.py'],
+
+        # Project uses reStructuredText, so ensure that the docutils get
+        # installed or upgraded on the target machine
+        install_requires = ['docutils>=0.3'],
+
+        package_data = {
+            # If any package contains *.txt or *.rst files, include them:
+            '': ['*.txt', '*.rst'],
+            # And include any *.msg files found in the 'hello' package, too:
+            'hello': ['*.msg'],
+        },
+
+        # metadata for upload to PyPI
+        author = "Me",
+        author_email = "me@example.com",
+        description = "This is an Example Package",
+        license = "PSF",
+        keywords = "hello world example examples",
+        url = "http://example.com/HelloWorld/",   # project home page, if any
+
+        # could also include long_description, download_url, classifiers, etc.
+    )
+
+In the sections that follow, we'll explain what most of these ``setup()``
+arguments do (except for the metadata ones), and the various ways you might use
+them in your own project(s).
+
+
+Specifying Your Project's Version
+---------------------------------
+
+Setuptools can work well with most versioning schemes; there are, however, a
+few special things to watch out for, in order to ensure that setuptools and
+EasyInstall can always tell what version of your package is newer than another
+version.  Knowing these things will also help you correctly specify what
+versions of other projects your project depends on.
+
+A version consists of an alternating series of release numbers and pre-release
+or post-release tags.  A release number is a series of digits punctuated by
+dots, such as ``2.4`` or ``0.5``.  Each series of digits is treated
+numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the
+same release number, denoting the first subrelease of release 2.  But  ``2.10``
+is the *tenth* subrelease of release 2, and so is a different and newer release
+from ``2.1`` or ``2.1.0``.  Leading zeros within a series of digits are also
+ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``.
+
+Following a release number, you can have either a pre-release or post-release
+tag.  Pre-release tags make a version be considered *older* than the version
+they are appended to.  So, revision ``2.4`` is *newer* than revision ``2.4c1``,
+which in turn is newer than ``2.4b1`` or ``2.4a1``.  Postrelease tags make
+a version be considered *newer* than the version they are appended to.  So,
+revisions like ``2.4-1`` and ``2.4pl3`` are newer than ``2.4``, but are *older*
+than ``2.4.1`` (which has a higher release number).
+
+A pre-release tag is a series of letters that are alphabetically before
+"final".  Some examples of prerelease tags would include ``alpha``, ``beta``,
+``a``, ``c``, ``dev``, and so on.  You do not have to place a dot or dash
+before the prerelease tag if it's immediately after a number, but it's okay to
+do so if you prefer.  Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all
+represent release candidate 1 of version ``2.4``, and are treated as identical
+by setuptools.
+
+In addition, there are three special prerelease tags that are treated as if
+they were the letter ``c``: ``pre``, ``preview``, and ``rc``.  So, version
+``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as
+``2.4c1``, and are treated as identical by setuptools.
+
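+For instance, a quick check with ``pkg_resources`` confirms this equivalence::
+
+    >>> from pkg_resources import parse_version
+    >>> parse_version('2.4rc1') == parse_version('2.4c1')
+    True
+    >>> parse_version('2.4pre1') == parse_version('2.4c1')
+    True
+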
+A post-release tag is either a series of letters that are alphabetically
+greater than or equal to "final", or a dash (``-``).  Post-release tags are
+generally used to separate patch numbers, port numbers, build numbers, revision
+numbers, or date stamps from the release number.  For example, the version
+``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of
+version ``2.4``.  Or you might use ``2.4-20051127`` to denote a date-stamped
+post-release.
+
+Notice that after each pre or post-release tag, you are free to place another
+release number, followed again by more pre- or post-release tags.  For example,
+``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the in-
+development version of the ninth alpha of release 0.6.  Notice that ``dev`` is
+a pre-release tag, so this version is a *lower* version number than ``0.6a9``,
+which would be the actual ninth alpha of release 0.6.  But the ``-r41475`` is
+a post-release tag, so this version is *newer* than ``0.6a9.dev``.
+
+For the most part, setuptools' interpretation of version numbers is intuitive,
+but here are a few tips that will keep you out of trouble in the corner cases:
+
+* Don't stick adjoining pre-release tags together without a dot or number
+  between them.  Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``,
+  *not* a development pre-release of ``1.9a``.  Use ``.dev`` instead, as in
+  ``1.9a.dev``, or separate the prerelease tags with a number, as in
+  ``1.9a0dev``.  ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are
+  identical versions from setuptools' point of view, so you can use whatever
+  scheme you prefer.
+
+* If you want to be certain that your chosen numbering scheme works the way
+  you think it will, you can use the ``pkg_resources.parse_version()`` function
+  to compare different version numbers::
+
+    >>> from pkg_resources import parse_version
+    >>> parse_version('1.9.a.dev') == parse_version('1.9a0dev')
+    True
+    >>> parse_version('2.1-rc2') < parse_version('2.1')
+    True
+    >>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9')
+    True
+
+Once you've decided on a version numbering scheme for your project, you can
+have setuptools automatically tag your in-development releases with various
+pre- or post-release tags.  See the following sections for more details:
+
+* `Tagging and "Daily Build" or "Snapshot" Releases`_
+* `Managing "Continuous Releases" Using Subversion`_
+* The `egg_info`_ command
+
+
+New and Changed ``setup()`` Keywords
+====================================
+
+The following keyword arguments to ``setup()`` are added or changed by
+``setuptools``.  All of them are optional; you do not have to supply them
+unless you need the associated ``setuptools`` feature.
+
+``include_package_data``
+    If set to ``True``, this tells ``setuptools`` to automatically include any
+    data files it finds inside your package directories, that are either under
+    CVS or Subversion control, or which are specified by your ``MANIFEST.in``
+    file.  For more information, see the section below on `Including Data
+    Files`_.
+
+``exclude_package_data``
+    A dictionary mapping package names to lists of glob patterns that should
+    be *excluded* from your package directories.  You can use this to trim back
+    any excess files included by ``include_package_data``.  For a complete
+    description and examples, see the section below on `Including Data Files`_.
+
+``package_data``
+    A dictionary mapping package names to lists of glob patterns.  For a
+    complete description and examples, see the section below on `Including
+    Data Files`_.  You do not need to use this option if you are using
+    ``include_package_data``, unless you need to add e.g. files that are
+    generated by your setup script and build process.  (And are therefore not
+    in source control or are files that you don't want to include in your
+    source distribution.)
+
+``zip_safe``
+    A boolean (True or False) flag specifying whether the project can be
+    safely installed and run from a zip file.  If this argument is not
+    supplied, the ``bdist_egg`` command will have to analyze all of your
+    project's contents for possible problems each time it builds an egg.
+
+``install_requires``
+    A string or list of strings specifying what other distributions need to
+    be installed when this one is.  See the section below on `Declaring
+    Dependencies`_ for details and examples of the format of this argument.
+
+``entry_points``
+    A dictionary mapping entry point group names to strings or lists of strings
+    defining the entry points.  Entry points are used to support dynamic
+    discovery of services or plugins provided by a project.  See `Dynamic
+    Discovery of Services and Plugins`_ for details and examples of the format
+    of this argument.  In addition, this keyword is used to support `Automatic
+    Script Creation`_.
+
+``extras_require``
+    A dictionary mapping names of "extras" (optional features of your project)
+    to strings or lists of strings specifying what other distributions must be
+    installed to support those features.  See the section below on `Declaring
+    Dependencies`_ for details and examples of the format of this argument.
+
+``setup_requires``
+    A string or list of strings specifying what other distributions need to
+    be present in order for the *setup script* to run.  ``setuptools`` will
+    attempt to obtain these (even going so far as to download them using
+    ``EasyInstall``) before processing the rest of the setup script or commands.
+    This argument is needed if you are using distutils extensions as part of
+    your build process; for example, extensions that process setup() arguments
+    and turn them into EGG-INFO metadata files.
+
+    (Note: projects listed in ``setup_requires`` will NOT be automatically
+    installed on the system where the setup script is being run.  They are
+    simply downloaded to the setup directory if they're not locally available
+    already.  If you want them to be installed, as well as being available
+    when the setup script is run, you should add them to ``install_requires``
+    **and** ``setup_requires``.)
+
+``dependency_links``
+    A list of strings naming URLs to be searched when satisfying dependencies.
+    These links will be used if needed to install packages specified by
+    ``setup_requires`` or ``tests_require``.  They will also be written into
+    the egg's metadata for use by tools like EasyInstall when installing
+    an ``.egg`` file.
+
+``namespace_packages``
+    A list of strings naming the project's "namespace packages".  A namespace
+    package is a package that may be split across multiple project
+    distributions.  For example, Zope 3's ``zope`` package is a namespace
+    package, because subpackages like ``zope.interface`` and ``zope.publisher``
+    may be distributed separately.  The egg runtime system can automatically
+    merge such subpackages into a single parent package at runtime, as long
+    as you declare them in each project that contains any subpackages of the
+    namespace package, and as long as the namespace package's ``__init__.py``
+    does not contain any code other than a namespace declaration.  See the
+    section below on `Namespace Packages`_ for more information.
+
+``test_suite``
+    A string naming a ``unittest.TestCase`` subclass (or a package or module
+    containing one or more of them, or a method of such a subclass), or naming
+    a function that can be called with no arguments and returns a
+    ``unittest.TestSuite``.  If the named suite is a module, and the module
+    has an ``additional_tests()`` function, it is called and the results are
+    added to the tests to be run.  If the named suite is a package, any
+    submodules and subpackages are recursively added to the overall test suite.
+
+    Specifying this argument enables use of the `test`_ command to run the
+    specified test suite, e.g. via ``setup.py test``.  See the section on the
+    `test`_ command below for more details.
+
+``tests_require``
+    If your project's tests need one or more additional packages besides those
+    needed to install it, you can use this option to specify them.  It should
+    be a string or list of strings specifying what other distributions need to
+    be present for the package's tests to run.  When you run the ``test``
+    command, ``setuptools`` will  attempt to obtain these (even going
+    so far as to download them using ``EasyInstall``).  Note that these
+    required projects will *not* be installed on the system where the tests
+    are run, but only downloaded to the project's setup directory if they're
+    not already installed locally.
+
+.. _test_loader:
+
+``test_loader``
+    If you would like to use a different way of finding tests to run than what
+    setuptools normally uses, you can specify a module name and class name in
+    this argument.  The named class must be instantiable with no arguments, and
+    its instances must support the ``loadTestsFromNames()`` method as defined
+    in the Python ``unittest`` module's ``TestLoader`` class.  Setuptools will
+    pass only one test "name" in the `names` argument: the value supplied for
+    the ``test_suite`` argument.  The loader you specify may interpret this
+    string in any way it likes, as there are no restrictions on what may be
+    contained in a ``test_suite`` string.
+
+    The module name and class name must be separated by a ``:``.  The default
+    value of this argument is ``"setuptools.command.test:ScanningLoader"``.  If
+    you want to use the default ``unittest`` behavior, you can specify
+    ``"unittest:TestLoader"`` as your ``test_loader`` argument instead.  This
+    will prevent automatic scanning of submodules and subpackages.
+
+    The module and class you specify here may be contained in another package,
+    as long as you use the ``tests_require`` option to ensure that the package
+    containing the loader class is available when the ``test`` command is run.
+
+``eager_resources``
+    A list of strings naming resources that should be extracted together, if
+    any of them is needed, or if any C extensions included in the project are
+    imported.  This argument is only useful if the project will be installed as
+    a zipfile, and there is a need to have all of the listed resources be
+    extracted to the filesystem *as a unit*.  Resources listed here
+    should be '/'-separated paths, relative to the source root, so to list a
+    resource ``foo.png`` in package ``bar.baz``, you would include the string
+    ``bar/baz/foo.png`` in this argument.
+
+    If you only need to obtain resources one at a time, or you don't have any C
+    extensions that access other files in the project (such as data files or
+    shared libraries), you probably do NOT need this argument and shouldn't
+    mess with it.  For more details on how this argument works, see the section
+    below on `Automatic Resource Extraction`_.
+
+``use_2to3``
+    Convert the source code from Python 2 to Python 3 with 2to3 during the
+    build process. See :doc:`python3` for more details.
+
+``convert_2to3_doctests``
+    List of doctest source files that need to be converted with 2to3.
+    See :doc:`python3` for more details.
+
+``use_2to3_fixers``
+    A list of modules to search for additional fixers to be used during
+    the 2to3 conversion. See :doc:`python3` for more details.
+
+
+Using ``find_packages()``
+-------------------------
+
+For simple projects, it's usually easy enough to manually add packages to
+the ``packages`` argument of ``setup()``.  However, for very large projects
+(Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the
+package list updated.  That's what ``setuptools.find_packages()`` is for.
+
+``find_packages()`` takes a source directory, and a list of package names or
+patterns to exclude.  If omitted, the source directory defaults to the same
+directory as the setup script.  Some projects use a ``src`` or ``lib``
+directory as the root of their source tree, and those projects would of course
+use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``.  (And
+such projects also need something like ``package_dir = {'':'src'}`` in their
+``setup()`` arguments, but that's just a normal distutils thing.)
+
+Anyway, ``find_packages()`` walks the target directory, and finds Python
+packages by looking for ``__init__.py`` files.  It then filters the list of
+packages using the exclusion patterns.
+
+Exclusion patterns are package names, optionally including wildcards.  For
+example, ``find_packages(exclude=["*.tests"])`` will exclude all packages whose
+last name part is ``tests``.   Or, ``find_packages(exclude=["*.tests",
+"*.tests.*"])`` will also exclude any subpackages of packages named ``tests``,
+but it still won't exclude a top-level ``tests`` package or the children
+thereof.  In fact, if you really want no ``tests`` packages at all, you'll need
+something like this::
+
+    find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
+
+in order to cover all the bases.  Really, the exclusion patterns are intended
+to cover simpler use cases than this, like excluding a single, specified
+package and its subpackages.
+
+Regardless of the target directory or exclusions, the ``find_packages()``
+function returns a list of package names suitable for use as the ``packages``
+argument to ``setup()``, and so is usually the easiest way to set that
+argument in your setup script.  Especially since it frees you from having to
+remember to modify your setup script whenever your project grows additional
+top-level packages or subpackages.
+
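+As a quick illustration, here is a sketch of the "src layout" case described
+above (the project name is a placeholder)::
+
+    from setuptools import setup, find_packages
+
+    setup(
+        name = "MyProject",
+        version = "0.1",
+        package_dir = {'': 'src'},
+        packages = find_packages('src',
+            exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
+    )
+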
+
+Automatic Script Creation
+=========================
+
+Packaging and installing scripts can be a bit awkward with the distutils.  For
+one thing, there's no easy way to have a script's filename match local
+conventions on both Windows and POSIX platforms.  For another, you often have
+to create a separate file just for the "main" script, when your actual "main"
+is a function in a module somewhere.  And even in Python 2.4, using the ``-m``
+option only works for actual ``.py`` files that aren't installed in a package.
+
+``setuptools`` fixes all of these problems by automatically generating scripts
+for you with the correct extension, and on Windows it will even create an
+``.exe`` file so that users don't have to change their ``PATHEXT`` settings.
+The way to use this feature is to define "entry points" in your setup script
+that indicate what function the generated script should import and run.  For
+example, to create two console scripts called ``foo`` and ``bar``, and a GUI
+script called ``baz``, you might do something like this::
+
+    setup(
+        # other arguments here...
+        entry_points = {
+            'console_scripts': [
+                'foo = my_package.some_module:main_func',
+                'bar = other_module:some_func',
+            ],
+            'gui_scripts': [
+                'baz = my_package_gui.start_func',
+            ]
+        }
+    )
+
+When this project is installed on non-Windows platforms (using "setup.py
+install", "setup.py develop", or by using EasyInstall), a set of ``foo``,
+``bar``, and ``baz`` scripts will be installed that import ``main_func`` and
+``some_func`` from the specified modules.  The functions you specify are called
+with no arguments, and their return value is passed to ``sys.exit()``, so you
+can return an errorlevel or message to print to stderr.
+
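+For illustration, a hypothetical ``my_package/some_module.py`` defining the
+``main_func`` used above could look like this sketch::
+
+    import sys
+
+    def main_func():
+        # setuptools-generated scripts call this with no arguments;
+        # command-line arguments are read from sys.argv.
+        args = sys.argv[1:]
+        if not args:
+            print("usage: foo NAME")
+            return 2              # return value is passed to sys.exit()
+        print("Hello, %s!" % args[0])
+        return 0
+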
+On Windows, a set of ``foo.exe``, ``bar.exe``, and ``baz.exe`` launchers are
+created, alongside a set of ``foo.py``, ``bar.py``, and ``baz.pyw`` files.  The
+``.exe`` wrappers find and execute the right version of Python to run the
+``.py`` or ``.pyw`` file.
+
+You may define as many "console script" and "gui script" entry points as you
+like, and each one can optionally specify "extras" that it depends on, that
+will be added to ``sys.path`` when the script is run.  For more information on
+"extras", see the section below on `Declaring Extras`_.  For more information
+on "entry points" in general, see the section below on `Dynamic Discovery of
+Services and Plugins`_.
+
+
+"Eggsecutable" Scripts
+----------------------
+
+Occasionally, there are situations where it's desirable to make an ``.egg``
+file directly executable.  You can do this by including an entry point such
+as the following::
+
+    setup(
+        # other arguments here...
+        entry_points = {
+            'setuptools.installation': [
+                'eggsecutable = my_package.some_module:main_func',
+            ]
+        }
+    )
+
+Any eggs built from the above setup script will include a short executable
+prelude that imports and calls ``main_func()`` from ``my_package.some_module``.
+The prelude can be run on Unix-like platforms (including Mac and Linux) by
+invoking the egg with ``/bin/sh``, or by enabling execute permissions on the
+``.egg`` file.  For the executable prelude to run, the appropriate version of
+Python must be available via the ``PATH`` environment variable, under its
+"long" name.  That is, if the egg is built for Python 2.3, there must be a
+``python2.3`` executable present in a directory on ``PATH``.
+
+This feature is primarily intended to support ``distribute_setup.py`` installing
+setuptools itself on non-Windows platforms, but may also be useful for other
+projects.
+
+IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or
+invoked via symlinks.  They *must* be invoked using their original filename, in
+order to ensure that, once running, ``pkg_resources`` will know what project
+and version is in use.  The header script will check this and exit with an
+error if the ``.egg`` file has been renamed or is invoked via a symlink that
+changes its base name.
+
+
+Declaring Dependencies
+======================
+
+``setuptools`` supports automatically installing dependencies when a package is
+installed, and including information about dependencies in Python Eggs (so that
+package management tools like EasyInstall can use the information).
+
+``setuptools`` and ``pkg_resources`` use a common syntax for specifying a
+project's required dependencies.  This syntax consists of a project's PyPI
+name, optionally followed by a comma-separated list of "extras" in square
+brackets, optionally followed by a comma-separated list of version
+specifiers.  A version specifier is one of the operators ``<``, ``>``, ``<=``,
+``>=``, ``==`` or ``!=``, followed by a version identifier.  Tokens may be
+separated by whitespace, but any whitespace or nonstandard characters within a
+project name or version identifier must be replaced with ``-``.
+
+Version specifiers for a given project are internally sorted into ascending
+version order, and used to establish what ranges of versions are acceptable.
+Adjacent redundant conditions are also consolidated (e.g. ``">1, >2"`` becomes
+``">1"``, and ``"<2,<3"`` becomes ``"<3"``). ``"!="`` versions are excised from
+the ranges they fall within.  A project's version is then checked for
+membership in the resulting ranges. (Note that providing conflicting conditions
+for the same version (e.g. "<2,>=2" or "==2,!=2") is meaningless and may
+therefore produce bizarre results.)
+
+Here are some example requirement specifiers::
+
+    docutils >= 0.3
+
+    # comment lines and \ continuations are allowed in requirement strings
+    BazSpam ==1.1, ==1.2, ==1.3, ==1.4, ==1.5, \
+        ==1.6, ==1.7  # and so are line-end comments
+
+    PEAK[FastCGI, reST]>=0.5a4
+
+    setuptools==0.5a7
+
+The simplest way to include requirement specifiers is to use the
+``install_requires`` argument to ``setup()``.  It takes a string or list of
+strings containing requirement specifiers.  If you include more than one
+requirement in a string, each requirement must begin on a new line.
+
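+For example, here is a minimal sketch reusing two of the specifiers shown above
+(the project name is a placeholder)::
+
+    from setuptools import setup
+
+    setup(
+        name = "MyProject",
+        version = "0.1",
+        install_requires = [
+            'docutils>=0.3',
+            'PEAK[FastCGI, reST]>=0.5a4',
+        ],
+    )
+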
+This has three effects:
+
+1. When your project is installed, either by using EasyInstall, ``setup.py
+   install``, or ``setup.py develop``, all of the dependencies not already
+   installed will be located (via PyPI), downloaded, built (if necessary),
+   and installed.
+
+2. Any scripts in your project will be installed with wrappers that verify
+   the availability of the specified dependencies at runtime, and ensure that
+   the correct versions are added to ``sys.path`` (e.g. if multiple versions
+   have been installed).
+
+3. Python Egg distributions will include a metadata file listing the
+   dependencies.
+
+Note, by the way, that if you declare your dependencies in ``setup.py``, you do
+*not* need to use the ``require()`` function in your scripts or modules, as
+long as you either install the project or use ``setup.py develop`` to do
+development work on it.  (See `"Development Mode"`_ below for more details on
+using ``setup.py develop``.)
+
+
+Dependencies that aren't in PyPI
+--------------------------------
+
+If your project depends on packages that aren't registered in PyPI, you may
+still be able to depend on them, as long as they are available for download
+as:
+
+- an egg or a source distribution in the standard distutils ``sdist`` format,
+- a single ``.py`` file, or
+- a VCS repository (Subversion, Mercurial, or Git).
+
+You just need to add some URLs to the ``dependency_links`` argument to
+``setup()``.
+
+The URLs must be either:
+
+1. direct download URLs,
+2. the URLs of web pages that contain direct download links, or
+3. the repository's URL
+
+In general, it's better to link to web pages, because it is usually less
+complex to update a web page than to release a new version of your project.
+You can also use a SourceForge ``showfiles.php`` link in the case where a
+package you depend on is distributed via SourceForge.
+
+If you depend on a package that's distributed as a single ``.py`` file, you
+must include an ``"#egg=project-version"`` suffix to the URL, to give a project
+name and version number.  (Be sure to escape any dashes in the name or version
+by replacing them with underscores.)  EasyInstall will recognize this suffix
+and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file
+as an egg.
+
+In the case of a VCS checkout, you should also append ``#egg=project-version``
+in order to identify for what package that checkout should be used. You can
+append ``@REV`` to the URL's path (before the fragment) to specify a revision.
+Additionally, you can also force the VCS to be used by prepending the URL with
+a certain prefix. Currently available are:
+
+-  ``svn+URL`` for Subversion,
+-  ``git+URL`` for Git, and
+-  ``hg+URL`` for Mercurial
+
+A more complete example would be:
+
+    ``vcs+proto://host/path@revision#egg=project-version``
+
+Be careful with the version. It should match the one inside the project files.
+If you want to disregard the version, you have to omit it both in the
+``requires`` and in the URL's fragment.
+
+This will do a checkout (or a clone, in Git and Mercurial parlance) to a
+temporary folder and run ``setup.py bdist_egg``.
+
+The ``dependency_links`` option takes the form of a list of URL strings.  For
+example, the below will cause EasyInstall to search the specified page for
+eggs or source distributions, if the package's dependencies aren't already
+installed::
+
+    setup(
+        ...
+        dependency_links = [
+            "http://peak.telecommunity.com/snapshots/"
+        ],
+    )
+
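+Combining this with a VCS URL and an ``#egg=`` fragment, a hedged sketch of
+depending on a project that is only available from a Git repository might look
+like this (the URL and project names are hypothetical)::
+
+    from setuptools import setup
+
+    setup(
+        name = "MyProject",
+        version = "0.1",
+        install_requires = ['ExampleProject==1.0'],
+        dependency_links = [
+            'git+http://example.com/repos/ExampleProject@v1.0#egg=ExampleProject-1.0',
+        ],
+    )
+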
+
+.. _Declaring Extras:
+
+
+Declaring "Extras" (optional features with their own dependencies)
+------------------------------------------------------------------
+
+Sometimes a project has "recommended" dependencies, that are not required for
+all uses of the project.  For example, a project might offer optional PDF
+output if ReportLab is installed, and reStructuredText support if docutils is
+installed.  These optional features are called "extras", and setuptools allows
+you to define their requirements as well.  In this way, other projects that
+require these optional features can force the additional requirements to be
+installed, by naming the desired extras in their ``install_requires``.
+
+For example, let's say that Project A offers optional PDF and reST support::
+
+    setup(
+        name="Project-A",
+        ...
+        extras_require = {
+            'PDF':  ["ReportLab>=1.2", "RXP"],
+            'reST': ["docutils>=0.3"],
+        }
+    )
+
+As you can see, the ``extras_require`` argument takes a dictionary mapping
+names of "extra" features, to strings or lists of strings describing those
+features' requirements.  These requirements will *not* be automatically
+installed unless another package depends on them (directly or indirectly) by
+including the desired "extras" in square brackets after the associated project
+name.  (Or if the extras were listed in a requirement spec on the EasyInstall
+command line.)
+
+Extras can be used by a project's `entry points`_ to specify dynamic
+dependencies.  For example, if Project A includes a "rst2pdf" script, it might
+declare it like this, so that the "PDF" requirements are only resolved if the
+"rst2pdf" script is run::
+
+    setup(
+        name="Project-A",
+        ...
+        entry_points = {
+            'console_scripts': [
+                'rst2pdf = project_a.tools.pdfgen [PDF]',
+                'rst2html = project_a.tools.htmlgen',
+                # more script entry points ...
+            ],
+        }
+    )
+
+Projects can also use another project's extras when specifying dependencies.
+For example, if project B needs "project A" with PDF support installed, it
+might declare the dependency like this::
+
+    setup(
+        name="Project-B",
+        install_requires = ["Project-A[PDF]"],
+        ...
+    )
+
+This will cause ReportLab to be installed along with project A, if project B is
+installed -- even if project A was already installed.  In this way, a project
+can encapsulate groups of optional "downstream dependencies" under a feature
+name, so that packages that depend on it don't have to know what the downstream
+dependencies are.  If a later version of Project A builds in PDF support and
+no longer needs ReportLab, or if it ends up needing other dependencies besides
+ReportLab in order to provide PDF support, Project B's setup information does
+not need to change, but the right packages will still be installed if needed.
+
+Note, by the way, that if a project ends up not needing any other packages to
+support a feature, it should keep an empty requirements list for that feature
+in its ``extras_require`` argument, so that packages depending on that feature
+don't break (due to an invalid feature name).  For example, if Project A above
+builds in PDF support and no longer needs ReportLab, it could change its
+setup to this::
+
+    setup(
+        name="Project-A",
+        ...
+        extras_require = {
+            'PDF':  [],
+            'reST': ["docutils>=0.3"],
+        }
+    )
+
+so that Project B doesn't have to remove the ``[PDF]`` from its requirement
+specifier.
+
+
+Including Data Files
+====================
+
+The distutils have traditionally allowed installation of "data files", which
+are placed in a platform-specific location.  However, the most common use case
+for data files distributed with a package is for use *by* the package, usually
+by including the data files in the package directory.
+
+Setuptools offers three ways to specify data files to be included in your
+packages.  First, you can simply use the ``include_package_data`` keyword,
+e.g.::
+
+    from setuptools import setup, find_packages
+    setup(
+        ...
+        include_package_data = True
+    )
+
+This tells setuptools to install any data files it finds in your packages.
+The data files must be under CVS or Subversion control, or else they must be
+specified via the distutils' ``MANIFEST.in`` file.  (They can also be tracked
+by another revision control system, using an appropriate plugin.  See the
+section below on `Adding Support for Other Revision Control Systems`_ for
+information on how to write such plugins.)
+
+If the data files are not under version control, or are not in a supported
+version control system, or if you want finer-grained control over what files
+are included (for example, if you have documentation files in your package
+directories and want to exclude them from installation), then you can also use
+the ``package_data`` keyword, e.g.::
+
+    from setuptools import setup, find_packages
+    setup(
+        ...
+        package_data = {
+            # If any package contains *.txt or *.rst files, include them:
+            '': ['*.txt', '*.rst'],
+            # And include any *.msg files found in the 'hello' package, too:
+            'hello': ['*.msg'],
+        }
+    )
+
+The ``package_data`` argument is a dictionary that maps from package names to
+lists of glob patterns.  The globs may include subdirectory names, if the data
+files are contained in a subdirectory of the package.  For example, if the
+package tree looks like this::
+
+    setup.py
+    src/
+        mypkg/
+            __init__.py
+            mypkg.txt
+            data/
+                somefile.dat
+                otherdata.dat
+
+The setuptools setup file might look like this::
+
+    from setuptools import setup, find_packages
+    setup(
+        ...
+        packages = find_packages('src'),  # include all packages under src
+        package_dir = {'':'src'},   # tell distutils packages are under src
+
+        package_data = {
+            # If any package contains *.txt files, include them:
+            '': ['*.txt'],
+            # And include any *.dat files found in the 'data' subdirectory
+            # of the 'mypkg' package, also:
+            'mypkg': ['data/*.dat'],
+        }
+    )
+
+Notice that if you list patterns in ``package_data`` under the empty string,
+these patterns are used to find files in every package, even ones that also
+have their own patterns listed.  Thus, in the above example, the ``mypkg.txt``
+file gets included even though it's not listed in the patterns for ``mypkg``.
+
+Also notice that if you use paths, you *must* use a forward slash (``/``) as
+the path separator, even if you are on Windows.  Setuptools automatically
+converts slashes to appropriate platform-specific separators at build time.
+
+(Note: although the ``package_data`` argument was previously only available in
+``setuptools``, it was also added to the Python ``distutils`` package as of
+Python 2.4; there is `some documentation for the feature`__ available on the
+python.org website.  If using the setuptools-specific ``include_package_data``
+argument, files specified by ``package_data`` will *not* be automatically
+added to the manifest unless they are tracked by a supported version control
+system, or are listed in the MANIFEST.in file.)
+
+__ http://docs.python.org/dist/node11.html
+
+Sometimes, the ``include_package_data`` or ``package_data`` options alone
+aren't sufficient to precisely define what files you want included.  For
+example, you may want to include package README files in your revision control
+system and source distributions, but exclude them from being installed.  So,
+setuptools offers an ``exclude_package_data`` option as well, that allows you
+to do things like this::
+
+    from setuptools import setup, find_packages
+    setup(
+        ...
+        packages = find_packages('src'),  # include all packages under src
+        package_dir = {'':'src'},   # tell distutils packages are under src
+
+        include_package_data = True,    # include everything in source control
+
+        # ...but exclude README.txt from all packages
+        exclude_package_data = { '': ['README.txt'] },
+    )
+
+The ``exclude_package_data`` option is a dictionary mapping package names to
+lists of wildcard patterns, just like the ``package_data`` option.  And, just
+as with that option, a key of ``''`` will apply the given pattern(s) to all
+packages.  However, any files that match these patterns will be *excluded*
+from installation, even if they were listed in ``package_data`` or were
+included as a result of using ``include_package_data``.
+
+In summary, the three options allow you to:
+
+``include_package_data``
+    Accept all data files and directories matched by ``MANIFEST.in`` or found
+    in source control.
+
+``package_data``
+    Specify additional patterns to match files and directories that may or may
+    not be matched by ``MANIFEST.in`` or found in source control.
+
+``exclude_package_data``
+    Specify patterns for data files and directories that should *not* be
+    included when a package is installed, even if they would otherwise have
+    been included due to the use of the preceding options.
+
+NOTE: Due to the way the distutils build process works, a data file that you
+include in your project and then stop including may be "orphaned" in your
+project's build directories, requiring you to run ``setup.py clean --all`` to
+fully remove them.  This may also be important for your users and contributors
+if they track intermediate revisions of your project using Subversion; be sure
+to let them know when you make changes that remove files from inclusion so they
+can run ``setup.py clean --all``.
+
+
+Accessing Data Files at Runtime
+-------------------------------
+
+Typically, existing programs manipulate a package's ``__file__`` attribute in
+order to find the location of data files.  However, this manipulation isn't
+compatible with PEP 302-based import hooks, including importing from zip files
+and Python Eggs.  It is strongly recommended that, if you are using data files,
+you should use the `Resource Management API`_ of ``pkg_resources`` to access
+them.  The ``pkg_resources`` module is distributed as part of setuptools, so if
+you're using setuptools to distribute your package, there is no reason not to
+use its resource management API.  See also `Accessing Package Resources`_ for
+a quick example of converting code that uses ``__file__`` to use
+``pkg_resources`` instead.
+
+.. _Resource Management API: http://peak.telecommunity.com/DevCenter/PythonEggs#resource-management
+.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources
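+
+As a rough sketch, code that previously built a path with ``__file__`` could
+instead read a bundled file like this (the package and file names are only
+illustrative)::
+
+    from pkg_resources import resource_string, resource_filename
+
+    # Read the raw contents of a data file inside the 'mypkg' package.
+    data = resource_string('mypkg', 'data/somefile.dat')
+
+    # Or obtain a real filesystem path (extracted from the zip if needed).
+    path = resource_filename('mypkg', 'data/somefile.dat')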
+
+
+Non-Package Data Files
+----------------------
+
+The ``distutils`` normally install general "data files" to a platform-specific
+location (e.g. ``/usr/share``).  This feature is intended to be used for things
+like documentation, example configuration files, and the like.  ``setuptools``
+does not install these data files in a separate location, however.  They are
+bundled inside the egg file or directory, alongside the Python modules and
+packages.  The data files can also be accessed using the `Resource Management
+API`_, by specifying a ``Requirement`` instead of a package name::
+
+    from pkg_resources import Requirement, resource_filename
+    filename = resource_filename(Requirement.parse("MyProject"),"sample.conf")
+
+The above code will obtain the filename of the "sample.conf" file in the data
+root of the "MyProject" distribution.
+
+Note, by the way, that this encapsulation of data files means that you can't
+actually install data files to some arbitrary location on a user's machine;
+this is a feature, not a bug.  You can always include a script in your
+distribution that extracts and copies your documentation or data files to
+a user-specified location, at their discretion.  If you put related data files
+in a single directory, you can use ``resource_filename()`` with the directory
+name to get a filesystem directory that then can be copied with the ``shutil``
+module.  (Even if your package is installed as a zipfile, calling
+``resource_filename()`` on a directory will return an actual filesystem
+directory, whose contents will be that entire subtree of your distribution.)
+
+(Of course, if you're writing a new package, you can just as easily place your
+data files or directories inside one of your packages, rather than using the
+distutils' approach.  However, if you're updating an existing application, it
+may be simpler not to change the way it currently specifies these data files.)
+
+
+Automatic Resource Extraction
+-----------------------------
+
+If you are using tools that expect your resources to be "real" files, or your
+project includes non-extension native libraries or other files that your C
+extensions expect to be able to access, you may need to list those files in
+the ``eager_resources`` argument to ``setup()``, so that the files will be
+extracted together, whenever a C extension in the project is imported.
+
+This is especially important if your project includes shared libraries *other*
+than distutils-built C extensions, and those shared libraries use file
+extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the
+extensions that setuptools 0.6a8 and higher automatically detects as shared
+libraries and adds to the ``native_libs.txt`` file for you.  Any shared
+libraries whose names do not end with one of those extensions should be listed
+as ``eager_resources``, because they need to be present in the filesystem when
+the C extensions that link to them are used.
+
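+A hedged sketch of listing such files (the paths are hypothetical, given
+relative to the source root with ``/`` separators)::
+
+    setup(
+        ...
+        eager_resources = [
+            'mypkg/lib/libfoo.so.1',   # a shared library with an unusual suffix
+            'mypkg/templates',         # a whole directory may be listed too
+        ],
+    )
+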
+The ``pkg_resources`` runtime for compressed packages will automatically
+extract *all* C extensions and ``eager_resources`` at the same time, whenever
+*any* C extension or eager resource is requested via the ``resource_filename()``
+API.  (C extensions are imported using ``resource_filename()`` internally.)
+This ensures that C extensions will see all of the "real" files that they
+expect to see.
+
+Note also that you can list directory resource names in ``eager_resources`` as
+well, in which case the directory's contents (including subdirectories) will be
+extracted whenever any C extension or eager resource is requested.
+
+Please note that if you're not sure whether you need to use this argument, you
+don't!  It's really intended to support projects with lots of non-Python
+dependencies and as a last resort for crufty projects that can't otherwise
+handle being compressed.  If your package is pure Python, Python plus data
+files, or Python plus C, you really don't need this.  You've got to be using
+either C or an external program that needs "real" files in your project before
+there's any possibility of ``eager_resources`` being relevant to your project.
+
+
+Extensible Applications and Frameworks
+======================================
+
+
+.. _Entry Points:
+
+Dynamic Discovery of Services and Plugins
+-----------------------------------------
+
+``setuptools`` supports creating libraries that "plug in" to extensible
+applications and frameworks, by letting you register "entry points" in your
+project that can be imported by the application or framework.
+
+For example, suppose that a blogging tool wants to support plugins
+that provide translation for various file types to the blog's output format.
+The framework might define an "entry point group" called ``blogtool.parsers``,
+and then allow plugins to register entry points for the file extensions they
+support.
+
+This would allow people to create distributions that contain one or more
+parsers for different file types, and then the blogging tool would be able to
+find the parsers at runtime by looking up an entry point for the file
+extension (or mime type, or however it wants to).
+
+Note that if the blogging tool includes parsers for certain file formats, it
+can register these as entry points in its own setup script, which means it
+doesn't have to special-case its built-in formats.  They can just be treated
+the same as any other plugin's entry points would be.
+
+If you're creating a project that plugs in to an existing application or
+framework, you'll need to know what entry points or entry point groups are
+defined by that application or framework.  Then, you can register entry points
+in your setup script.  Here are a few examples of ways you might register an
+``.rst`` file parser entry point in the ``blogtool.parsers`` entry point group,
+for our hypothetical blogging tool::
+
+    setup(
+        # ...
+        entry_points = {'blogtool.parsers': '.rst = some_module:SomeClass'}
+    )
+
+    setup(
+        # ...
+        entry_points = {'blogtool.parsers': ['.rst = some_module:a_func']}
+    )
+
+    setup(
+        # ...
+        entry_points = """
+            [blogtool.parsers]
+            .rst = some.nested.module:SomeClass.some_classmethod [reST]
+        """,
+        extras_require = dict(reST = "Docutils>=0.3.5")
+    )
+
+The ``entry_points`` argument to ``setup()`` accepts either a string with
+``.ini``-style sections, or a dictionary mapping entry point group names to
+either strings or lists of strings containing entry point specifiers.  An
+entry point specifier consists of a name and value, separated by an ``=``
+sign.  The value consists of a dotted module name, optionally followed by a
+``:`` and a dotted identifier naming an object within the module.  It can
+also include a bracketed list of "extras" that are required for the entry
+point to be used.  When the invoking application or framework requests loading
+of an entry point, any requirements implied by the associated extras will be
+passed to ``pkg_resources.require()``, so that an appropriate error message
+can be displayed if the needed package(s) are missing.  (Of course, the
+invoking app or framework can ignore such errors if it wants to make an entry
+point optional if a requirement isn't installed.)
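+
+On the consuming side, the application or framework typically uses the
+``pkg_resources`` entry point API to discover and load the registered plugins.
+A minimal sketch for our hypothetical blogging tool (the helper function shown
+here is purely illustrative) might look like this:
+
+.. code-block:: python
+
+    import pkg_resources
+
+    def find_parser(extension):
+        # Scan all installed projects for parsers registered under the
+        # "blogtool.parsers" entry point group with a matching name.
+        for entry_point in pkg_resources.iter_entry_points(
+                'blogtool.parsers', extension):
+            # load() imports the module, resolves any bracketed "extras"
+            # requirements, and returns the advertised object.
+            return entry_point.load()
+        raise LookupError("no parser registered for %r" % extension)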
+
+
+Defining Additional Metadata
+----------------------------
+
+Some extensible applications and frameworks may need to define their own kinds
+of metadata to include in eggs, which they can then access using the
+``pkg_resources`` metadata APIs.  Ordinarily, this is done by having plugin
+developers include additional files in their ``ProjectName.egg-info``
+directory.  However, since it can be tedious to create such files by hand, you
+may want to create a distutils extension that will create the necessary files
+from arguments to ``setup()``, in much the same way that ``setuptools`` does
+for many of the ``setup()`` arguments it adds.  See the section below on
+`Creating distutils Extensions`_ for more details, especially the subsection on
+`Adding new EGG-INFO Files`_.
+
+
+"Development Mode"
+==================
+
+Under normal circumstances, the ``distutils`` assume that you are going to
+build a distribution of your project, not use it in its "raw" or "unbuilt"
+form.  If you were to use the ``distutils`` that way, you would have to rebuild
+and reinstall your project every time you made a change to it during
+development.
+
+Another problem that sometimes comes up with the ``distutils`` is that you may
+need to do development on two related projects at the same time.  You may need
+to put both projects' packages in the same directory to run them, but need to
+keep them separate for revision control purposes.  How can you do this?
+
+Setuptools allows you to deploy your projects for use in a common directory or
+staging area, but without copying any files.  Thus, you can edit each project's
+code in its checkout directory, and only need to run build commands when you
+change a project's C extensions or similarly compiled files.  You can even
+deploy a project into another project's checkout directory, if that's your
+preferred way of working (as opposed to using a common independent staging area
+or the site-packages directory).
+
+To do this, use the ``setup.py develop`` command.  It works very similarly to
+``setup.py install`` or the EasyInstall tool, except that it doesn't actually
+install anything.  Instead, it creates a special ``.egg-link`` file in the
+deployment directory, that links to your project's source code.  And, if your
+deployment directory is Python's ``site-packages`` directory, it will also
+update the ``easy-install.pth`` file to include your project's source code,
+thereby making it available on ``sys.path`` for all programs using that Python
+installation.
+
+If you have enabled the ``use_2to3`` flag, then of course the ``.egg-link``
+will not link directly to your source code when run under Python 3, since
+that source code would be made for Python 2 and not work under Python 3.
+Instead, ``setup.py develop`` will build Python 3 code under the ``build``
+directory, and link there. This means that after doing code changes you will
+have to run ``setup.py build`` before these changes are picked up by your
+Python 3 installation.
+
+In addition, the ``develop`` command creates wrapper scripts in the target
+script directory that will run your in-development scripts after ensuring that
+all your ``install_requires`` packages are available on ``sys.path``.
+
+You can deploy the same project to multiple staging areas, e.g. if you have
+multiple projects on the same machine that depend on the same project you're
+doing development work on.
+
+When you're done with a given development task, you can remove the project
+source from a staging area using ``setup.py develop --uninstall``, specifying
+the desired staging area if it's not the default.
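+
+For example, deploying to (and later removing from) a hypothetical staging
+directory might look like this::
+
+    python setup.py develop --install-dir=~/lib/staging
+    python setup.py develop --uninstall --install-dir=~/lib/staging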
+
+There are several options to control the precise behavior of the ``develop``
+command; see the section on the `develop`_ command below for more details.
+
+Note that you can also apply setuptools commands to non-setuptools projects,
+using commands like this::
+
+   python -c "import setuptools; execfile('setup.py')" develop
+
+That is, you can simply list the normal setup commands and options following
+the quoted part.
+
+
+Distributing a ``setuptools``-based project
+===========================================
+
+Using ``setuptools``...  Without bundling it!
+---------------------------------------------
+
+Your users might not have ``setuptools`` installed on their machines, or even
+if they do, it might not be the right version.  Fixing this is easy; just
+download `distribute_setup.py`_, and put it in the same directory as your ``setup.py``
+script.  (Be sure to add it to your revision control system, too.)  Then add
+these two lines to the very top of your setup script, before the script imports
+anything from setuptools:
+
+.. code-block:: python
+
+    import distribute_setup
+    distribute_setup.use_setuptools()
+
+That's it.  The ``distribute_setup`` module will automatically download a matching
+version of ``setuptools`` from PyPI, if it isn't present on the target system.
+Whenever you install an updated version of setuptools, you should also update
+your projects' ``distribute_setup.py`` files, so that a matching version gets installed
+on the target machine(s).
+
+By the way, setuptools supports the new PyPI "upload" command, so you can use
+``setup.py sdist upload`` or ``setup.py bdist_egg upload`` to upload your
+source or egg distributions respectively.  Your project's current version must
+be registered with PyPI first, of course; you can use ``setup.py register`` to
+do that.  Or you can do it all in one step, e.g. ``setup.py register sdist
+bdist_egg upload`` will register the package, build source and egg
+distributions, and then upload them both to PyPI, where they'll be easily
+found by other projects that depend on them.
+
+(By the way, if you need to distribute a specific version of ``setuptools``,
+you can specify the exact version and base download URL as parameters to the
+``use_setuptools()`` function.  See the function's docstring for details.)
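+
+A hedged sketch of what that might look like (the parameter names shown here
+are indicative only; check the docstring in your copy of
+``distribute_setup.py`` for the exact signature):
+
+.. code-block:: python
+
+    import distribute_setup
+    distribute_setup.use_setuptools(
+        version="0.6.35",
+        download_base="http://pypi.python.org/packages/source/d/distribute/",
+    )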
+
+
+What Your Users Should Know
+---------------------------
+
+In general, a setuptools-based project looks just like any distutils-based
+project -- as long as your users have an internet connection and are installing
+to ``site-packages``, that is.  But for some users, these conditions don't
+apply, and they may become frustrated if this is their first encounter with
+a setuptools-based project.  To keep these users happy, you should review the
+following topics in your project's installation instructions, if they are
+relevant to your project and your target audience isn't already familiar with
+setuptools and ``easy_install``.
+
+Network Access
+    If your project is using ``distribute_setup``, you should inform users of the
+    need to either have network access, or to preinstall the correct version of
+    setuptools using the `EasyInstall installation instructions`_.  Those
+    instructions also have tips for dealing with firewalls as well as how to
+    manually download and install setuptools.
+
+Custom Installation Locations
+    You should inform your users that if they are installing your project to
+    somewhere other than the main ``site-packages`` directory, they should
+    first install setuptools using the instructions for `Custom Installation
+    Locations`_, before installing your project.
+
+Your Project's Dependencies
+    If your project depends on other projects that may need to be downloaded
+    from PyPI or elsewhere, you should list them in your installation
+    instructions, or tell users how to find out what they are.  While most
+    users will not need this information, any users who don't have unrestricted
+    internet access may have to find, download, and install the other projects
+    manually.  (Note, however, that they must still install those projects
+    using ``easy_install``, or your project will not know they are installed,
+    and your setup script will try to download them again.)
+
+    If you want to be especially friendly to users with limited network access,
+    you may wish to build eggs for your project and its dependencies, making
+    them all available for download from your site, or at least create a page
+    with links to all of the needed eggs.  In this way, users with limited
+    network access can manually download all the eggs to a single directory,
+    then use the ``-f`` option of ``easy_install`` to specify the directory
+    to find eggs in.  Users who have full network access can just use ``-f``
+    with the URL of your download page, and ``easy_install`` will find all the
+    needed eggs using your links directly.  This is also useful when your
+    target audience isn't able to compile packages (e.g. most Windows users)
+    and your package or some of its dependencies include C code.
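+
+    For example, users who have downloaded the needed eggs into a single
+    directory might run something like this (the path and project name are
+    hypothetical)::
+
+        easy_install -f /path/to/downloaded-eggs MyProject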
+
+Subversion or CVS Users and Co-Developers
+    Users and co-developers who are tracking your in-development code using
+    CVS, Subversion, or some other revision control system should probably read
+    this manual's sections regarding such development.  Alternately, you may
+    wish to create a quick-reference guide containing the tips from this manual
+    that apply to your particular situation.  For example, if you recommend
+    that people use ``setup.py develop`` when tracking your in-development
+    code, you should let them know that this needs to be run after every update
+    or commit.
+
+    Similarly, if you remove modules or data files from your project, you
+    should remind them to run ``setup.py clean --all`` and delete any obsolete
+    ``.pyc`` or ``.pyo`` files.  (This tip applies to the distutils in general,
+    not just setuptools, but not everybody knows about it; be kind to your users
+    by spelling out your project's best practices rather than leaving them
+    guessing.)
+
+Creating System Packages
+    Some users want to manage all Python packages using a single package
+    manager, and sometimes that package manager isn't ``easy_install``!
+    Setuptools currently supports ``bdist_rpm``, ``bdist_wininst``, and
+    ``bdist_dumb`` formats for system packaging.  If a user has a
+    locally-installed "bdist" packaging tool that internally uses the distutils
+    ``install`` command, it should be able to work with ``setuptools``.  Some
+    examples of "bdist" formats that this should work with include the
+    ``bdist_nsi`` and ``bdist_msi`` formats for Windows.
+
+    However, packaging tools that build binary distributions by running
+    ``setup.py install`` on the command line or as a subprocess will require
+    modification to work with setuptools.  They should use the
+    ``--single-version-externally-managed`` option to the ``install`` command,
+    combined with the standard ``--root`` or ``--record`` options.
+    See the `install command`_ documentation below for more details.  The
+    ``bdist_deb`` command is an example of a command that currently requires
+    this kind of patching to work with setuptools.
+
+    If you or your users have a problem building a usable system package for
+    your project, please report the problem via the mailing list so that
+    either the "bdist" tool in question or setuptools can be modified to
+    resolve the issue.
+
+
+
+Managing Multiple Projects
+--------------------------
+
+If you're managing several projects that need to use ``distribute_setup``, and you
+are using Subversion as your revision control system, you can use the
+"svn:externals" property to share a single copy of ``distribute_setup`` between
+projects, so that it will always be up-to-date whenever you check out or update
+an individual project, without having to manually update each project to use
+a new version.
+
+However, because Subversion only supports using directories as externals, you
+have to turn ``distribute_setup.py`` into ``distribute_setup/__init__.py`` in order
+to do this, then create "externals" definitions that map the ``distribute_setup``
+directory into each project.  Also, if any of your projects use
+``find_packages()`` on their setup directory, you will need to exclude the
+resulting ``distribute_setup`` package, to keep it from being included in your
+distributions, e.g.::
+
+    setup(
+        ...
+        packages = find_packages(exclude=['distribute_setup']),
+    )
+
+Of course, the ``distribute_setup`` package will still be included in your
+packages' source distributions, as it needs to be.
+
+For your convenience, you may use the following external definition, which will
+track the latest version of setuptools::
+
+    ez_setup svn://svn.eby-sarna.com/svnroot/ez_setup
+
+You can set this by executing this command in your project directory::
+
+    svn propedit svn:externals .
+
+And then adding the line shown above to the file that comes up for editing.
+
+
+Setting the ``zip_safe`` flag
+-----------------------------
+
+For maximum performance, Python packages are best installed as zip files.
+Not all packages, however, are capable of running in compressed form, because
+they may expect to be able to access either source code or data files as
+normal operating system files.  So, ``setuptools`` can install your project
+as a zipfile or a directory, and its default choice is determined by the
+project's ``zip_safe`` flag.
+
+You can pass a True or False value for the ``zip_safe`` argument to the
+``setup()`` function, or you can omit it.  If you omit it, the ``bdist_egg``
+command will analyze your project's contents to see if it can detect any
+conditions that would prevent it from working in a zipfile.  It will output
+notices to the console about any such conditions that it finds.
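+
+For example, a project that already knows it cannot run from a zipfile can
+simply declare so::
+
+    setup(
+        # ...
+        zip_safe = False
+    )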
+
+Currently, this analysis is extremely conservative: it will consider the
+project unsafe if it contains any C extensions or datafiles whatsoever.  This
+does *not* mean that the project can't or won't work as a zipfile!  It just
+means that the ``bdist_egg`` authors aren't yet comfortable asserting that
+the project *will* work.  If the project contains no C or data files, and does
+no ``__file__`` or ``__path__`` introspection or source code manipulation, then
+there is an extremely solid chance the project will work when installed as a
+zipfile.  (And if the project uses ``pkg_resources`` for all its data file
+access, then C extensions and other data files shouldn't be a problem at all.
+See the `Accessing Data Files at Runtime`_ section above for more information.)
+
+However, if ``bdist_egg`` can't be *sure* that your package will work, but
+you've checked over all the warnings it issued, and you are either satisfied it
+*will* work (or if you want to try it for yourself), then you should set
+``zip_safe`` to ``True`` in your ``setup()`` call.  If it turns out that it
+doesn't work, you can always change it to ``False``, which will force
+``setuptools`` to install your project as a directory rather than as a zipfile.
+
+Of course, the end-user can still override either decision, if they are using
+EasyInstall to install your package.  And, if you want to override for testing
+purposes, you can just run ``setup.py easy_install --zip-ok .`` or ``setup.py
+easy_install --always-unzip .`` in your project directory to install the
+package as a zipfile or directory, respectively.
+
+In the future, as we gain more experience with different packages and become
+more satisfied with the robustness of the ``pkg_resources`` runtime, the
+"zip safety" analysis may become less conservative.  However, we strongly
+recommend that you determine for yourself whether your project functions
+correctly when installed as a zipfile, correct any problems if you can, and
+then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe``
+flag, so that it will not be necessary for ``bdist_egg`` or ``EasyInstall`` to
+try to guess whether your project can work as a zipfile.
+
+
+Namespace Packages
+------------------
+
+Sometimes, a large package is more useful if distributed as a collection of
+smaller eggs.  However, Python does not normally allow the contents of a
+package to be retrieved from more than one location.  "Namespace packages"
+are a solution for this problem.  When you declare a package to be a namespace
+package, it means that the package has no meaningful contents in its
+``__init__.py``, and that it is merely a container for modules and subpackages.
+
+The ``pkg_resources`` runtime will then automatically ensure that the contents
+of namespace packages that are spread over multiple eggs or directories are
+combined into a single "virtual" package.
+
+The ``namespace_packages`` argument to ``setup()`` lets you declare your
+project's namespace packages, so that they will be included in your project's
+metadata.  The argument should list the namespace packages that the egg
+participates in.  For example, the ZopeInterface project might do this::
+
+    setup(
+        # ...
+        namespace_packages = ['zope']
+    )
+
+because it contains a ``zope.interface`` package that lives in the ``zope``
+namespace package.  Similarly, a project for a standalone ``zope.publisher``
+would also declare the ``zope`` namespace package.  When these projects are
+installed and used, Python will see them both as part of a "virtual" ``zope``
+package, even though they will be installed in different locations.
+
+Namespace packages don't have to be top-level packages.  For example, Zope 3's
+``zope.app`` package is a namespace package, and in the future PEAK's
+``peak.util`` package will be too.
+
+Note, by the way, that your project's source tree must include the namespace
+packages' ``__init__.py`` files (and the ``__init__.py`` of any parent
+packages), in a normal Python package layout.  These ``__init__.py`` files
+*must* contain the line::
+
+    __import__('pkg_resources').declare_namespace(__name__)
+
+This code ensures that the namespace package machinery is operating and that
+the current package is registered as a namespace package.
+
+You must NOT include any other code and data in a namespace package's
+``__init__.py``.  Even though it may appear to work during development, or when
+projects are installed as ``.egg`` files, it will not work when the projects
+are installed using "system" packaging tools -- in such cases the
+``__init__.py`` files will not be installed, let alone executed.
+
+You must include the ``declare_namespace()``  line in the ``__init__.py`` of
+*every* project that has contents for the namespace package in question, in
+order to ensure that the namespace will be declared regardless of which
+project's copy of ``__init__.py`` is loaded first.  If the first loaded
+``__init__.py`` doesn't declare it, it will never *be* declared, because no
+other copies will ever be loaded!
+
+
+TRANSITIONAL NOTE
+~~~~~~~~~~~~~~~~~
+
+Setuptools 0.6a automatically calls ``declare_namespace()`` for you at runtime,
+but the 0.7a versions will *not*.  This is because the automatic declaration
+feature has some negative side effects, such as needing to import all namespace
+packages during the initialization of the ``pkg_resources`` runtime, and also
+the need for ``pkg_resources`` to be explicitly imported before any namespace
+packages work at all.  Beginning with the 0.7a releases, you'll be responsible
+for including your own declaration lines, and the automatic declaration feature
+will be dropped to get rid of the negative side effects.
+
+During the remainder of the 0.6 development cycle, therefore, setuptools will
+warn you about missing ``declare_namespace()`` calls in your ``__init__.py``
+files, and you should correct these as soon as possible before setuptools 0.7a1
+is released.  Namespace packages without declaration lines will not work
+correctly once a user has upgraded to setuptools 0.7a1, so it's important that
+you make this change now in order to avoid having your code break in the field.
+Our apologies for the inconvenience, and thank you for your patience.
+
+
+
+Tagging and "Daily Build" or "Snapshot" Releases
+------------------------------------------------
+
+When a set of related projects are under development, it may be important to
+track finer-grained version increments than you would normally use for e.g.
+"stable" releases.  While stable releases might be measured in dotted numbers
+with alpha/beta/etc. status codes, development versions of a project often
+need to be tracked by revision or build number or even build date.  This is
+especially true when projects in development need to refer to one another, and
+therefore may literally need an up-to-the-minute version of something!
+
+To support these scenarios, ``setuptools`` allows you to "tag" your source and
+egg distributions by adding one or more of the following to the project's
+"official" version identifier:
+
+* A manually-specified pre-release tag, such as "build" or "dev", or a
+  manually-specified post-release tag, such as a build or revision number
+  (``--tag-build=STRING, -bSTRING``)
+
+* A "last-modified revision number" string generated automatically from
+  Subversion's metadata (assuming your project is being built from a Subversion
+  "working copy")  (``--tag-svn-revision, -r``)
+
+* An 8-character representation of the build date (``--tag-date, -d``), as
+  a postrelease tag
+
+You can add these tags by adding ``egg_info`` and the desired options to
+the command line ahead of the ``sdist`` or ``bdist`` commands that you want
+to generate a daily build or snapshot for.  See the section below on the
+`egg_info`_ command for more details.
+
+(Also, before you release your project, be sure to see the section above on
+`Specifying Your Project's Version`_ for more information about how pre- and
+post-release tags affect how setuptools and EasyInstall interpret version
+numbers.  This is important in order to make sure that dependency processing
+tools will know which versions of your project are newer than others.)
+
+Finally, if you are creating builds frequently, and either building them in a
+downloadable location or are copying them to a distribution server, you should
+probably also check out the `rotate`_ command, which lets you automatically
+delete all but the N most-recently-modified distributions matching a glob
+pattern.  So, you can use a command line like::
+
+    setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3
+
+to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the
+most recent Subversion revision that affected the source tree), and then
+delete any egg files from the distribution directory except for the three
+that were built most recently.
+
+If you have to manage automated builds for multiple packages, each with
+different tagging and rotation policies, you may also want to check out the
+`alias`_ command, which would let each package define an alias like ``daily``
+that would perform the necessary tag, build, and rotate commands.  Then, a
+simpler script or cron job could just run ``setup.py daily`` in each project
+directory.  (And, you could also define sitewide or per-user default versions
+of the ``daily`` alias, so that projects that didn't define their own would
+use the appropriate defaults.)
+
+
+Generating Source Distributions
+-------------------------------
+
+``setuptools`` enhances the distutils' default algorithm for source file
+selection, so that all files managed by CVS or Subversion in your project tree
+are included in any source distribution you build.  This is a big improvement
+over having to manually write a ``MANIFEST.in`` file and try to keep it in
+sync with your project.  So, if you are using CVS or Subversion, and your
+source distributions only need to include files that you're tracking in
+revision control, don't create a ``MANIFEST.in`` file for your project.
+(And, if you already have one, you might consider deleting it the next time
+you would otherwise have to change it.)
+
+(NOTE: other revision control systems besides CVS and Subversion can be
+supported using plugins; see the section below on `Adding Support for Other
+Revision Control Systems`_ for information on how to write such plugins.)
+
+If you need to include automatically generated files, or files that are kept in
+an unsupported revision control system, you'll need to create a ``MANIFEST.in``
+file to specify any files that the default file location algorithm doesn't
+catch.  See the distutils documentation for more information on the format of
+the ``MANIFEST.in`` file.
+
+But, be sure to ignore any part of the distutils documentation that deals with
+``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you
+from these issues and doesn't work the same way in any case.  Unlike the
+distutils, setuptools regenerates the source distribution manifest file
+every time you build a source distribution, and it builds it inside the
+project's ``.egg-info`` directory, out of the way of your main project
+directory.  You therefore need not worry about whether it is up-to-date or not.
+
+Indeed, because setuptools' approach to determining the contents of a source
+distribution is so much simpler, its ``sdist`` command omits nearly all of
+the options that the distutils' more complex ``sdist`` process requires.  For
+all practical purposes, you'll probably use only the ``--formats`` option, if
+you use any option at all.
+
+(By the way, if you're using some other revision control system, you might
+consider creating and publishing a `revision control plugin for setuptools`_.)
+
+
+.. _revision control plugin for setuptools: `Adding Support for Other Revision Control Systems`_
+
+
+Making your package available for EasyInstall
+---------------------------------------------
+
+If you use the ``register`` command (``setup.py register``) to register your
+package with PyPI, that's most of the battle right there.  (See the
+`docs for the register command`_ for more details.)
+
+.. _docs for the register command: http://docs.python.org/dist/package-index.html
+
+If you also use the `upload`_ command to upload actual distributions of your
+package, that's even better, because EasyInstall will be able to find and
+download them directly from your project's PyPI page.
+
+However, there may be reasons why you don't want to upload distributions to
+PyPI, and just want your existing distributions (or perhaps a Subversion
+checkout) to be used instead.
+
+So here's what you need to do before running the ``register`` command.  There
+are three ``setup()`` arguments that affect EasyInstall:
+
+``url`` and ``download_url``
+   These become links on your project's PyPI page.  EasyInstall will examine
+   them to see if they link to a package ("primary links"), or whether they are
+   HTML pages.  If they're HTML pages, EasyInstall scans all HREF's on the
+   page for primary links.
+
+``long_description``
+   EasyInstall will check any URLs contained in this argument to see if they
+   are primary links.
+
+A URL is considered a "primary link" if it is a link to a .tar.gz, .tgz, .zip,
+.egg, .egg.zip, .tar.bz2, or .exe file, or if it has an ``#egg=project`` or
+``#egg=project-version`` fragment identifier attached to it.  EasyInstall
+attempts to determine a project name and optional version number from the text
+of a primary link *without* downloading it.  When it has found all the primary
+links, EasyInstall will select the best match based on requested version,
+platform compatibility, and other criteria.
+
+So, if your ``url`` or ``download_url`` point either directly to a downloadable
+source distribution, or to HTML page(s) that have direct links to such, then
+EasyInstall will be able to locate downloads automatically.  If you want to
+make Subversion checkouts available, then you should create links with either
+``#egg=project`` or ``#egg=project-version`` added to the URL.  You should
+replace ``project`` and ``version`` with the values they would have in an egg
+filename.  (Be sure to actually generate an egg and then use the initial part
+of the filename, rather than trying to guess what the escaped form of the
+project name and version number will be.)
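+
+For example, a Subversion checkout link for a hypothetical project might look
+like this::
+
+    http://svn.example.org/MyProject/trunk#egg=MyProject-dev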
+
+Note that Subversion checkout links are of lower precedence than other kinds
+of distributions, so EasyInstall will not select a Subversion checkout for
+downloading unless it has a version included in the ``#egg=`` suffix, and
+it's a higher version than EasyInstall has seen in any other links for your
+project.
+
+As a result, it's a common practice to mark checkout URLs with a version of
+"dev" (i.e., ``#egg=projectname-dev``), so that users can do something like
+this::
+
+    easy_install --editable projectname==dev
+
+in order to check out the in-development version of ``projectname``.
+
+
+Managing "Continuous Releases" Using Subversion
+-----------------------------------------------
+
+If you expect your users to track in-development versions of your project via
+Subversion, there are a few additional steps you should take to ensure that
+things work smoothly with EasyInstall.  First, you should add the following
+to your project's ``setup.cfg`` file:
+
+.. code-block:: ini
+
+    [egg_info]
+    tag_build = .dev
+    tag_svn_revision = 1
+
+This will tell ``setuptools`` to generate package version numbers like
+``1.0a1.dev-r1263``, which will be considered to be an *older* release than
+``1.0a1``.  Thus, when you actually release ``1.0a1``, the entire egg
+infrastructure (including ``setuptools``, ``pkg_resources`` and EasyInstall)
+will know that ``1.0a1`` supersedes any interim snapshots from Subversion, and
+handle upgrades accordingly.
+
+(Note: the project version number you specify in ``setup.py`` should always be
+the *next* version of your software, not the last released version.
+Alternately, you can leave out the ``tag_build=.dev``, and always use the
+*last* release as a version number, so that your post-1.0 builds are labelled
+``1.0-r1263``, indicating a post-1.0 patchlevel.  Most projects so far,
+however, seem to prefer to think of their project as being a future version
+still under development, rather than a past version being patched.  It is of
+course possible for a single project to have both situations, using
+post-release numbering on release branches, and pre-release numbering on the
+trunk.  But you don't have to make things this complex if you don't want to.)
+
+Commonly, projects releasing code from Subversion will include a PyPI link to
+their checkout URL (as described in the previous section) with an
+``#egg=projectname-dev`` suffix.  This allows users to request EasyInstall
+to download ``projectname==dev`` in order to get the latest in-development
+code.  Note that if your project depends on such in-progress code, you may wish
+to specify your ``install_requires`` (or other requirements) to include
+``==dev``, e.g.:
+
+.. code-block:: python
+
+    install_requires = ["OtherProject>=0.2a1.dev-r143,==dev"]
+
+The above example says, "I really want at least this particular development
+revision number, but feel free to follow and use an ``#egg=OtherProject-dev``
+link if you find one".  This avoids the need to have actual source or binary
+distribution snapshots of in-development code available, just to be able to
+depend on the latest and greatest a project has to offer.
+
+A final note for Subversion development: if you are using SVN revision tags
+as described in this section, it's a good idea to run ``setup.py develop``
+after each Subversion checkin or update, because your project's version number
+will be changing, and your script wrappers need to be updated accordingly.
+
+Also, if the project's requirements have changed, the ``develop`` command will
+take care of fetching the updated dependencies, building changed extensions,
+etc.  Be sure to also remind any of your users who check out your project
+from Subversion that they need to run ``setup.py develop`` after every update
+in order to keep their checkout completely in sync.
+
+
+Making "Official" (Non-Snapshot) Releases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When you make an official release, creating source or binary distributions,
+you will need to override the tag settings from ``setup.cfg``, so that you
+don't end up registering versions like ``foobar-0.7a1.dev-r34832``.  This is
+easy to do if you are developing on the trunk and using tags or branches for
+your releases - just make the change to ``setup.cfg`` after branching or
+tagging the release, so the trunk will still produce development snapshots.
+
+Alternately, if you are not branching for releases, you can override the
+default version options on the command line, using something like::
+
+    python setup.py egg_info -RDb "" sdist bdist_egg register upload
+
+The first part of this command (``egg_info -RDb ""``) will override the
+configured tag information, before creating source and binary eggs, registering
+the project with PyPI, and uploading the files.  Thus, these commands will use
+the plain version from your ``setup.py``, without adding the Subversion
+revision number or build designation string.
+
+Of course, if you will be doing this a lot, you may wish to create a personal
+alias for this operation, e.g.::
+
+    python setup.py alias -u release egg_info -RDb ""
+
+You can then use it like this::
+
+    python setup.py release sdist bdist_egg register upload
+
+Or of course you can create more elaborate aliases that do all of the above.
+See the sections below on the `egg_info`_ and `alias`_ commands for more ideas.
+
+
+
+Distributing Extensions compiled with Pyrex
+-------------------------------------------
+
+``setuptools`` includes transparent support for building Pyrex extensions, as
+long as you define your extensions using ``setuptools.Extension``, *not*
+``distutils.Extension``.  You must also not import anything from Pyrex in
+your setup script.
+
+If you follow these rules, you can safely list ``.pyx`` files as the source
+of your ``Extension`` objects in the setup script.  ``setuptools`` will detect
+at build time whether Pyrex is installed or not.  If it is, then ``setuptools``
+will use it.  If not, then ``setuptools`` will silently change the
+``Extension`` objects to refer to the ``.c`` counterparts of the ``.pyx``
+files, so that the normal distutils C compilation process will occur.
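+
+For example, a sketch of a setup script for a hypothetical Pyrex extension
+module::
+
+    from setuptools import setup, Extension
+
+    setup(
+        # ...
+        ext_modules = [Extension("mypackage.fastmod",
+                                 ["mypackage/fastmod.pyx"])],
+    )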
+
+Of course, for this to work, your source distributions must include the C
+code generated by Pyrex, as well as your original ``.pyx`` files.  This means
+that you will probably want to include current ``.c`` files in your revision
+control system, rebuilding them whenever you check changes in for the ``.pyx``
+source files.  This will ensure that people tracking your project in CVS or
+Subversion will be able to build it even if they don't have Pyrex installed,
+and that your source releases will be similarly usable with or without Pyrex.
+
+
+-----------------
+Command Reference
+-----------------
+
+.. _alias:
+
+``alias`` - Define shortcuts for commonly used commands
+=======================================================
+
+Sometimes, you need to use the same commands over and over, but you can't
+necessarily set them as defaults.  For example, if you produce both development
+snapshot releases and "stable" releases of a project, you may want to put
+the distributions in different places, or use different ``egg_info`` tagging
+options, etc.  In these cases, it doesn't make sense to set the options in
+a distutils configuration file, because the values of the options change based
+on what you're trying to do.
+
+Setuptools therefore allows you to define "aliases" - shortcut names for
+an arbitrary string of commands and options, using ``setup.py alias aliasname
+expansion``, where aliasname is the name of the new alias, and the remainder of
+the command line supplies its expansion.  For example, this command defines
+a sitewide alias called "daily", that sets various ``egg_info`` tagging
+options::
+
+    setup.py alias --global-config daily egg_info --tag-svn-revision \
+        --tag-build=development
+
+Once the alias is defined, it can then be used with other setup commands,
+e.g.::
+
+    setup.py daily bdist_egg        # generate a daily-build .egg file
+    setup.py daily sdist            # generate a daily-build source distro
+    setup.py daily sdist bdist_egg  # generate both
+
+The above commands are interpreted as if the word ``daily`` were replaced with
+``egg_info --tag-svn-revision --tag-build=development``.
+
+Note that setuptools will expand each alias *at most once* in a given command
+line.  This serves two purposes.  First, if you accidentally create an alias
+loop, it will have no effect; you'll instead get an error message about an
+unknown command.  Second, it allows you to define an alias for a command that
+uses that command.  For example, this (project-local) alias::
+
+    setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg
+
+redefines the ``bdist_egg`` command so that it always runs the ``rotate``
+command afterwards to delete all but the newest egg file.  It doesn't loop
+indefinitely on ``bdist_egg`` because the alias is only expanded once when
+used.
+
+You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.::
+
+    setup.py alias --global-config --remove daily
+
+would delete the "daily" alias we defined above.
+
+Aliases can be defined on a project-specific, per-user, or sitewide basis.  The
+default is to define or remove a project-specific alias, but you can use any of
+the `configuration file options`_ (listed under the `saveopts`_ command, below)
+to determine which distutils configuration file the alias will be added to
+(or removed from).
+
+Note that if you omit the "expansion" argument to the ``alias`` command,
+you'll get output showing that alias' current definition (and what
+configuration file it's defined in).  If you omit the alias name as well,
+you'll get a listing of all current aliases along with their configuration
+file locations.
+
+
+``bdist_egg`` - Create a Python Egg for the project
+===================================================
+
+This command generates a Python Egg (``.egg`` file) for the project.  Python
+Eggs are the preferred binary distribution format for EasyInstall, because they
+are cross-platform (for "pure" packages), directly importable, and contain
+project metadata including scripts and information about the project's
+dependencies.  They can be simply downloaded and added to ``sys.path``
+directly, or they can be placed in a directory on ``sys.path`` and then
+automatically discovered by the egg runtime system.
+
+This command runs the `egg_info`_ command (if it hasn't already run) to update
+the project's metadata (``.egg-info``) directory.  If you have added any extra
+metadata files to the ``.egg-info`` directory, those files will be included in
+the new egg file's metadata directory, for use by the egg runtime system or by
+any applications or frameworks that use that metadata.
+
+You won't usually need to specify any special options for this command; just
+use ``bdist_egg`` and you're done.  But there are a few options that may
+be occasionally useful:
+
+``--dist-dir=DIR, -d DIR``
+    Set the directory where the ``.egg`` file will be placed.  If you don't
+    supply this, then the ``--dist-dir`` setting of the ``bdist`` command
+    will be used, which is usually a directory named ``dist`` in the project
+    directory.
+
+``--plat-name=PLATFORM, -p PLATFORM``
+    Set the platform name string that will be embedded in the egg's filename
+    (assuming the egg contains C extensions).  This can be used to override
+    the distutils default platform name with something more meaningful.  Keep
+    in mind, however, that the egg runtime system expects to see eggs with
+    distutils platform names, so it may ignore or reject eggs with non-standard
+    platform names.  Similarly, the EasyInstall program may ignore them when
+    searching web pages for download links.  However, if you are
+    cross-compiling or doing some other unusual things, you might find a use
+    for this option.
+
+``--exclude-source-files``
+    Don't include any modules' ``.py`` files in the egg, just compiled Python,
+    C, and data files.  (Note that this doesn't affect any ``.py`` files in the
+    EGG-INFO directory or its subdirectories, since for example there may be
+    scripts with a ``.py`` extension which must still be retained.)  We don't
+    recommend that you use this option except for packages that are being
+    bundled for proprietary end-user applications, or for "embedded" scenarios
+    where space is at an absolute premium.  On the other hand, if your package
+    is going to be installed and used in compressed form, you might as well
+    exclude the source because Python's ``traceback`` module doesn't currently
+    understand how to display zipped source code anyway, or how to deal with
+    files that are in a different place from where their code was compiled.
+
+There are also some options you will probably never need, but which are there
+because they were copied from similar ``bdist`` commands used as an example for
+creating this one.  They may be useful for testing and debugging, however,
+which is why we kept them:
+
+``--keep-temp, -k``
+    Keep the contents of the ``--bdist-dir`` tree around after creating the
+    ``.egg`` file.
+
+``--bdist-dir=DIR, -b DIR``
+    Set the temporary directory for creating the distribution.  The entire
+    contents of this directory are zipped to create the ``.egg`` file, after
+    running various installation commands to copy the package's modules, data,
+    and extensions here.
+
+``--skip-build``
+    Skip doing any "build" commands; just go straight to the
+    install-and-compress phases.
+
+
+.. _develop:
+
+``develop`` - Deploy the project source in "Development Mode"
+=============================================================
+
+This command allows you to deploy your project's source for use in one or more
+"staging areas" where it will be available for importing.  This deployment is
+done in such a way that changes to the project source are immediately available
+in the staging area(s), without needing to run a build or install step after
+each change.
+
+The ``develop`` command works by creating an ``.egg-link`` file (named for the
+project) in the given staging area.  If the staging area is Python's
+``site-packages`` directory, it also updates an ``easy-install.pth`` file so
+that the project is on ``sys.path`` by default for all programs run using that
+Python installation.
+
+The ``develop`` command also installs wrapper scripts in the staging area (or
+a separate directory, as specified) that will ensure the project's dependencies
+are available on ``sys.path`` before running the project's source scripts.
+And, it ensures that any missing project dependencies are available in the
+staging area, by downloading and installing them if necessary.
+
+Last, but not least, the ``develop`` command invokes the ``build_ext -i``
+command to ensure any C extensions in the project have been built and are
+up-to-date, and the ``egg_info`` command to ensure the project's metadata is
+updated (so that the runtime and wrappers know what the project's dependencies
+are).  If you make any changes to the project's setup script or C extensions,
+you should rerun the ``develop`` command against all relevant staging areas to
+keep the project's scripts, metadata and extensions up-to-date.  Most other
+kinds of changes to your project should not require any build operations or
+rerunning ``develop``, but keep in mind that even minor changes to the setup
+script (e.g. changing an entry point definition) require you to re-run the
+``develop`` or ``test`` commands to keep the distribution updated.
+
+Here are some of the options that the ``develop`` command accepts.  Note that
+they affect the project's dependencies as well as the project itself, so if you
+have dependencies that need to be installed and you use ``--exclude-scripts``
+(for example), the dependencies' scripts will not be installed either!  For
+this reason, you may want to use EasyInstall to install the project's
+dependencies before using the ``develop`` command, if you need finer control
+over the installation options for dependencies.
+
+``--uninstall, -u``
+    Un-deploy the current project.  You may use the ``--install-dir`` or ``-d``
+    option to designate the staging area.  The created ``.egg-link`` file will
+    be removed, if present and it is still pointing to the project directory.
+    The project directory will be removed from ``easy-install.pth`` if the
+    staging area is Python's ``site-packages`` directory.
+
+    Note that this option currently does *not* uninstall script wrappers!  You
+    must uninstall them yourself, or overwrite them by using EasyInstall to
+    activate a different version of the package.  You can also avoid installing
+    script wrappers in the first place, if you use the ``--exclude-scripts``
+    (aka ``-x``) option when you run ``develop`` to deploy the project.
+
+``--multi-version, -m``
+    "Multi-version" mode. Specifying this option prevents ``develop`` from
+    adding an ``easy-install.pth`` entry for the project(s) being deployed, and
+    if an entry for any version of a project already exists, the entry will be
+    removed upon successful deployment.  In multi-version mode, no specific
+    version of the package is available for importing, unless you use
+    ``pkg_resources.require()`` to put it on ``sys.path``, or you are running
+    a wrapper script generated by ``setuptools`` or EasyInstall.  (In which
+    case the wrapper script calls ``require()`` for you.)
+
+    Note that if you install to a directory other than ``site-packages``,
+    this option is automatically in effect, because ``.pth`` files can only be
+    used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use
+    the ``--install-dir`` or ``-d`` option (or they are set via configuration
+    file(s)), your project and its dependencies will be deployed in
+    multi-version mode.
+
+``--install-dir=DIR, -d DIR``
+    Set the installation directory (staging area).  If this option is not
+    directly specified on the command line or in a distutils configuration
+    file, the distutils default installation location is used.  Normally, this
+    will be the ``site-packages`` directory, but if you are using distutils
+    configuration files, setting things like ``prefix`` or ``install_lib``,
+    then those settings are taken into account when computing the default
+    staging area.
+
+``--script-dir=DIR, -s DIR``
+    Set the script installation directory.  If you don't supply this option
+    (via the command line or a configuration file), but you *have* supplied
+    an ``--install-dir`` (via command line or config file), then this option
+    defaults to the same directory, so that the scripts will be able to find
+    their associated package installation.  Otherwise, this setting defaults
+    to the location where the distutils would normally install scripts, taking
+    any distutils configuration file settings into account.
+
+``--exclude-scripts, -x``
+    Don't deploy script wrappers.  This is useful if you don't want to disturb
+    existing versions of the scripts in the staging area.
+
+``--always-copy, -a``
+    Copy all needed distributions to the staging area, even if they
+    are already present in another directory on ``sys.path``.  By default, if
+    a requirement can be met using a distribution that is already available in
+    a directory on ``sys.path``, it will not be copied to the staging area.
+
+``--egg-path=DIR``
+    Force the generated ``.egg-link`` file to use a specified relative path
+    to the source directory.  This can be useful in circumstances where your
+    installation directory is being shared by code running under multiple
+    platforms (e.g. Mac and Windows) which have different absolute locations
+    for the code under development, but the same *relative* locations with
+    respect to the installation directory.  If you use this option when
+    installing, you must supply the same relative path when uninstalling.
+
+In addition to the above options, the ``develop`` command also accepts all of
+the same options accepted by ``easy_install``.  If you've configured any
+``easy_install`` settings in your ``setup.cfg`` (or other distutils config
+files), the ``develop`` command will use them as defaults, unless you override
+them in a ``[develop]`` section or on the command line.
+
+
+``easy_install`` - Find and install packages
+============================================
+
+This command runs the `EasyInstall tool
+<http://peak.telecommunity.com/DevCenter/EasyInstall>`_ for you.  It is exactly
+equivalent to running the ``easy_install`` command.  All command line arguments
+following this command are consumed and not processed further by the distutils,
+so this must be the last command listed on the command line.  Please see
+the EasyInstall documentation for the options reference and usage examples.
+Normally, there is no reason to use this command via the command line, as you
+can just use ``easy_install`` directly.  It's only listed here so that you know
+it's a distutils command, which means that you can:
+
+* create command aliases that use it,
+* create distutils extensions that invoke it as a subcommand, and
+* configure options for it in your ``setup.cfg`` or other distutils config
+  files.
+
+
+.. _egg_info:
+
+``egg_info`` - Create egg metadata and set build tags
+=====================================================
+
+This command performs two operations: it updates a project's ``.egg-info``
+metadata directory (used by the ``bdist_egg``, ``develop``, and ``test``
+commands), and it allows you to temporarily change a project's version string,
+to support "daily builds" or "snapshot" releases.  It is run automatically by
+the ``sdist``, ``bdist_egg``, ``develop``, ``register``, and ``test`` commands
+in order to update the project's metadata, but you can also specify it
+explicitly in order to temporarily change the project's version string while
+executing other commands.  (It also generates the ``.egg-info/SOURCES.txt``
+manifest file, which is used when you are building source distributions.)
+
+In addition to writing the core egg metadata defined by ``setuptools`` and
+required by ``pkg_resources``, this command can be extended to write other
+metadata files as well, by defining entry points in the ``egg_info.writers``
+group.  See the section on `Adding new EGG-INFO Files`_ below for more details.
+Note that using additional metadata writers may require you to include a
+``setup_requires`` argument to ``setup()`` in order to ensure that the desired
+writers are available on ``sys.path``.
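+
+Although the full details are covered in the section referenced above, a very
+rough sketch of such an extension might look like this (the ``foo`` keyword,
+module name, and file name are all hypothetical):
+
+.. code-block:: python
+
+    # myextension.py -- writer invoked by the egg_info command
+    def write_foo_txt(cmd, basename, filename):
+        # "cmd" is the running egg_info command instance; write the metadata
+        # file if the (hypothetical) "foo" setup() keyword was supplied, or
+        # remove a stale copy if it wasn't.
+        value = getattr(cmd.distribution, 'foo', None)
+        cmd.write_or_delete_file("foo", filename, value)
+
+    # and in that extension's own setup script:
+    setup(
+        # ...
+        entry_points = {
+            "egg_info.writers": ["foo.txt = myextension:write_foo_txt"],
+        },
+    )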
+
+
+Release Tagging Options
+-----------------------
+
+The following options can be used to modify the project's version string for
+all remaining commands on the setup command line.  The options are processed
+in the order shown, so if you use more than one, the requested tags will be
+added in the following order:
+
+``--tag-build=NAME, -b NAME``
+    Append NAME to the project's version string.  Due to the way setuptools
+    processes "pre-release" version suffixes beginning with the letters "a"
+    through "e" (like "alpha", "beta", and "candidate"), you will usually want
+    to use a tag like ".build" or ".dev", as this will cause the version number
+    to be considered *lower* than the project's default version.  (If you
+    want to make the version number *higher* than the default version, you can
+    always leave off --tag-build and then use one or both of the following
+    options.)
+
+    If you have a default build tag set in your ``setup.cfg``, you can suppress
+    it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument
+    to the ``egg_info`` command.
+
+``--tag-svn-revision, -r``
+    If the current directory is a Subversion checkout (i.e. has a ``.svn``
+    subdirectory), this appends a string of the form "-rNNNN" to the project's
+    version string, where NNNN is the revision number of the most recent
+    modification to the current directory, as obtained from the ``svn info``
+    command.
+
+    If the current directory is not a Subversion checkout, the command will
+    look for a ``PKG-INFO`` file instead, and try to find the revision number
+    from that, by looking for a "-rNNNN" string at the end of the version
+    number.  (This is so that building a package from a source distribution of
+    a Subversion snapshot will produce a binary with the correct version
+    number.)
+
+    If there is no ``PKG-INFO`` file, or the version number contained therein
+    does not end with ``-r`` and a number, then ``-r0`` is used.
+
+``--no-svn-revision, -R``
+    Don't include the Subversion revision in the version number.  This option
+    is included so you can override a default setting put in ``setup.cfg``.
+
+``--tag-date, -d``
+    Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the
+    project's version number.
+
+``--no-date, -D``
+    Don't include a date stamp in the version number.  This option is included
+    so you can override a default setting in ``setup.cfg``.
+
+
+(Note: Because these options modify the version number used for source and
+binary distributions of your project, you should first make sure that you know
+how the resulting version numbers will be interpreted by automated tools
+like EasyInstall.  See the section above on `Specifying Your Project's
+Version`_ for an explanation of pre- and post-release tags, as well as tips on
+how to choose and verify a versioning scheme for your project.)
+
+For advanced uses, there is one other option that can be set, to change the
+location of the project's ``.egg-info`` directory.  Commands that need to find
+the project's source directory or metadata should get it from this setting:
+
+
+Other ``egg_info`` Options
+--------------------------
+
+``--egg-base=SOURCEDIR, -e SOURCEDIR``
+    Specify the directory that should contain the .egg-info directory.  This
+    should normally be the root of your project's source tree (which is not
+    necessarily the same as your project directory; some projects use a ``src``
+    or ``lib`` subdirectory as the source root).  You should not normally need
+    to specify this directory, as it is normally determined from the
+    ``package_dir`` argument to the ``setup()`` function, if any.  If there is
+    no ``package_dir`` set, this option defaults to the current directory.
+
+
+``egg_info`` Examples
+---------------------
+
+Creating a dated "nightly build" snapshot egg::
+
+    python setup.py egg_info --tag-date --tag-build=DEV bdist_egg
+
+Creating and uploading a release with no version tags, even if some default
+tags are specified in ``setup.cfg``::
+
+    python setup.py egg_info -RDb "" sdist bdist_egg register upload
+
+(Notice that ``egg_info`` must always appear on the command line *before* any
+commands that you want the version changes to apply to.)
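+
+These tag options can also be recorded as defaults in ``setup.cfg``, so that
+every build is tagged unless you suppress the tags on the command line, as in
+the second example above.  A minimal sketch (the tag value shown is only an
+illustration):
+
+.. code-block:: ini
+
+    [egg_info]
+    tag_build = .dev
+    tag_svn_revision = 1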
+
+
+.. _install command:
+
+``install`` - Run ``easy_install`` or old-style installation
+============================================================
+
+The setuptools ``install`` command is basically a shortcut to run the
+``easy_install`` command on the current project.  However, for convenience
+in creating "system packages" of setuptools-based projects, you can also
+use this option:
+
+``--single-version-externally-managed``
+    This boolean option tells the ``install`` command to perform an "old style"
+    installation, with the addition of an ``.egg-info`` directory so that the
+    installed project will still have its metadata available and operate
+    normally.  If you use this option, you *must* also specify the ``--root``
+    or ``--record`` options (or both), because otherwise you will have no way
+    to identify and remove the installed files.
+
+This option is automatically in effect when ``install`` is invoked by another
+distutils command, so that commands like ``bdist_wininst`` and ``bdist_rpm``
+will create system packages of eggs.  It is also automatically in effect if
+you specify the ``--root`` option.
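+
+For example, to perform an "old style" installation into the default location
+while recording the installed files so they can be removed later (the record
+filename is only illustrative)::
+
+    python setup.py install --single-version-externally-managed --record=installed-files.txt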
+
+
+``install_egg_info`` - Install an ``.egg-info`` directory in ``site-packages``
+==============================================================================
+
+Setuptools runs this command as part of ``install`` operations that use the
+``--single-version-externally-managed`` options.  You should not invoke it
+directly; it is documented here for completeness and so that distutils
+extensions such as system package builders can make use of it.  This command
+has only one option:
+
+``--install-dir=DIR, -d DIR``
+    The parent directory where the ``.egg-info`` directory will be placed.
+    Defaults to the same as the ``--install-dir`` option specified for the
+    ``install_lib`` command, which is usually the system ``site-packages``
+    directory.
+
+This command assumes that the ``egg_info`` command has been given valid options
+via the command line or ``setup.cfg``, as it will invoke the ``egg_info``
+command and use its options to locate the project's source ``.egg-info``
+directory.
+
+
+.. _rotate:
+
+``rotate`` - Delete outdated distribution files
+===============================================
+
+As you develop new versions of your project, your distribution (``dist``)
+directory will gradually fill up with older source and/or binary distribution
+files.  The ``rotate`` command lets you automatically clean these up, keeping
+only the N most-recently modified files matching a given pattern.
+
+``--match=PATTERNLIST, -m PATTERNLIST``
+    Comma-separated list of glob patterns to match.  This option is *required*.
+    The project name and ``-*`` are prepended to the supplied patterns, in order
+    to match only distributions belonging to the current project (in case you
+    have a shared distribution directory for multiple projects).  Typically,
+    you will use a glob pattern like ``.zip`` or ``.egg`` to match files of
+    the specified type.  Note that each supplied pattern is treated as a
+    distinct group of files for purposes of selecting files to delete.
+
+``--keep=COUNT, -k COUNT``
+    Number of matching distributions to keep.  For each group of files
+    identified by a pattern specified with the ``--match`` option, delete all
+    but the COUNT most-recently-modified files in that group.  This option is
+    *required*.
+
+``--dist-dir=DIR, -d DIR``
+    Directory where the distributions are.  This defaults to the value of the
+    ``bdist`` command's ``--dist-dir`` option, which will usually be the
+    project's ``dist`` subdirectory.
+
+**Example 1**: Delete all .tar.gz files from the distribution directory, except
+for the 3 most recently modified ones::
+
+    setup.py rotate --match=.tar.gz --keep=3
+
+**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution
+directory, except the most recently modified one for each Python version::
+
+    setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1
+
+
+.. _saveopts:
+
+``saveopts`` - Save used options to a configuration file
+========================================================
+
+Finding and editing ``distutils`` configuration files can be a pain, especially
+since you also have to translate the configuration options from command-line
+form to the proper configuration file format.  You can avoid these hassles by
+using the ``saveopts`` command.  Just add it to the command line to save the
+options you used.  For example, this command builds the project using
+the ``mingw32`` C compiler, then saves the --compiler setting as the default
+for future builds (even those run implicitly by the ``install`` command)::
+
+    setup.py build --compiler=mingw32 saveopts
+
+The ``saveopts`` command saves all options for every command specified on the
+command line to the project's local ``setup.cfg`` file, unless you use one of
+the `configuration file options`_ to change where the options are saved.  For
+example, this command does the same as above, but saves the compiler setting
+to the site-wide (global) distutils configuration::
+
+    setup.py build --compiler=mingw32 saveopts -g
+
+Note that it doesn't matter where you place the ``saveopts`` command on the
+command line; it will still save all the options specified for all commands.
+For example, this is another valid way to spell the last example::
+
+    setup.py saveopts -g build --compiler=mingw32
+
+Note, however, that all of the commands specified are always run, regardless of
+where ``saveopts`` is placed on the command line.
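+
+For reference, after running the first example above, the project's local
+``setup.cfg`` would contain an entry along these lines:
+
+.. code-block:: ini
+
+    [build]
+    compiler = mingw32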
+
+
+Configuration File Options
+--------------------------
+
+Normally, settings such as options and aliases are saved to the project's
+local ``setup.cfg`` file.  But you can override this and save them to the
+global or per-user configuration files, or to a manually-specified filename.
+
+``--global-config, -g``
+    Save settings to the global ``distutils.cfg`` file inside the ``distutils``
+    package directory.  You must have write access to that directory to use
+    this option.  You also can't combine this option with ``-u`` or ``-f``.
+
+``--user-config, -u``
+    Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or
+    ``$HOME/pydistutils.cfg`` (Windows) file.  You can't combine this option
+    with ``-g`` or ``-f``.
+
+``--filename=FILENAME, -f FILENAME``
+    Save settings to the specified configuration file.  You can't
+    combine this option with ``-g`` or ``-u``.  Note that if you specify a
+    non-standard filename, the ``distutils`` and ``setuptools`` will not
+    use the file's contents.  This option is mainly included for use in
+    testing.
+
+These options are used by other ``setuptools`` commands that modify
+configuration files, such as the `alias`_ and `setopt`_ commands.
+
+
+.. _setopt:
+
+``setopt`` - Set a distutils or setuptools option in a config file
+==================================================================
+
+This command is mainly for use by scripts, but it can also be used as a quick
+and dirty way to change a distutils configuration option without having to
+remember what file the options are in and then open an editor.
+
+**Example 1**.  Set the default C compiler to ``mingw32`` (using long option
+names)::
+
+    setup.py setopt --command=build --option=compiler --set-value=mingw32
+
+**Example 2**.  Remove any setting for the distutils default package
+installation directory (short option names)::
+
+    setup.py setopt -c install -o install_lib -r
+
+
+Options for the ``setopt`` command:
+
+``--command=COMMAND, -c COMMAND``
+    Command to set the option for.  This option is required.
+
+``--option=OPTION, -o OPTION``
+    The name of the option to set.  This option is required.
+
+``--set-value=VALUE, -s VALUE``
+    The value to set the option to.  Not needed if ``-r`` or ``--remove`` is
+    set.
+
+``--remove, -r``
+    Remove (unset) the option, instead of setting it.
+
+In addition to the above options, you may use any of the `configuration file
+options`_ (listed under the `saveopts`_ command, above) to determine which
+distutils configuration file the option will be added to (or removed from).
+
+
+.. _test:
+
+``test`` - Build package and run a unittest suite
+=================================================
+
+When doing test-driven development, or running automated builds that need
+testing before they are deployed for downloading or use, it's often useful
+to be able to run a project's unit tests without actually deploying the project
+anywhere, even using the ``develop`` command.  The ``test`` command runs a
+project's unit tests without actually deploying it, by temporarily putting the
+project's source on ``sys.path``, after first running ``build_ext -i`` and
+``egg_info`` to ensure that any C extensions and project metadata are
+up-to-date.
+
+To use this command, your project's tests must be wrapped in a ``unittest``
+test suite by either a function, a ``TestCase`` class or method, or a module
+or package containing ``TestCase`` classes.  If the named suite is a module,
+and the module has an ``additional_tests()`` function, it is called and the
+result (which must be a ``unittest.TestSuite``) is added to the tests to be
+run.  If the named suite is a package, any submodules and subpackages are
+recursively added to the overall test suite.  (Note: if your project specifies
+a ``test_loader``, the rules for processing the chosen ``test_suite`` may
+differ; see the `test_loader`_ documentation for more details.)
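+
+For example, a test package might contribute extra doctests through an
+``additional_tests()`` function like this (the module names are only
+illustrative)::
+
+    # my_package/tests/__init__.py
+    import doctest
+    import unittest
+
+    def additional_tests():
+        # must return a unittest.TestSuite; it is added to the tests
+        # collected from this package's TestCase classes
+        suite = unittest.TestSuite()
+        suite.addTest(doctest.DocTestSuite('my_package.some_module'))
+        return suite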
+
+Note that many test systems including ``doctest`` support wrapping their
+non-``unittest`` tests in ``TestSuite`` objects.  So, if you are using a test
+package that does not support this, we suggest you encourage its developers to
+implement test suite support, as this is a convenient and standard way to
+aggregate a collection of tests to be run under a common test harness.
+
+By default, tests will be run in the "verbose" mode of the ``unittest``
+package's text test runner, but you can get the "quiet" mode (just dots) if
+you supply the ``-q`` or ``--quiet`` option, either as a global option to
+the setup script (e.g. ``setup.py -q test``) or as an option for the ``test``
+command itself (e.g. ``setup.py test -q``).  There is one other option
+available:
+
+``--test-suite=NAME, -s NAME``
+    Specify the test suite (or module, class, or method) to be run
+    (e.g. ``some_module.test_suite``).  The default for this option can be
+    set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.::
+
+        setup(
+            # ...
+            test_suite = "my_package.tests.test_all"
+        )
+
+    If you did not set a ``test_suite`` in your ``setup()`` call, and do not
+    provide a ``--test-suite`` option, an error will occur.
+
+
+.. _upload:
+
+``upload`` - Upload source and/or egg distributions to PyPI
+===========================================================
+
+PyPI now supports uploading project files for redistribution; uploaded files
+are easily found by EasyInstall, even if you don't have download links on your
+project's home page.
+
+Although Python 2.5 will support uploading all types of distributions to PyPI,
+setuptools only supports source distributions and eggs.  (This is partly
+because PyPI's upload support is currently broken for various other file
+types.)  To upload files, you must include the ``upload`` command *after* the
+``sdist`` or ``bdist_egg`` commands on the setup command line.  For example::
+
+    setup.py bdist_egg upload         # create an egg and upload it
+    setup.py sdist upload             # create a source distro and upload it
+    setup.py sdist bdist_egg upload   # create and upload both
+
+Note that to upload files for a project, the corresponding version must already
+be registered with PyPI, using the distutils ``register`` command.  It's
+usually a good idea to include the ``register`` command at the start of the
+command line, so that any registration problems can be found and fixed before
+building and uploading the distributions, e.g.::
+
+    setup.py register sdist bdist_egg upload
+
+This will update PyPI's listing for your project's current version.
+
+Note, by the way, that the metadata in your ``setup()`` call determines what
+will be listed in PyPI for your package.  Try to fill out as much of it as
+possible, as it will save you a lot of trouble manually adding and updating
+your PyPI listings.  Just put it in ``setup.py`` and use the ``register``
+command to keep PyPI up to date.
+
+The ``upload`` command has a few options worth noting:
+
+``--sign, -s``
+    Sign each uploaded file using GPG (GNU Privacy Guard).  The ``gpg`` program
+    must be available for execution on the system ``PATH``.
+
+``--identity=NAME, -i NAME``
+    Specify the identity or key name for GPG to use when signing.  The value of
+    this option will be passed through the ``--local-user`` option of the
+    ``gpg`` program.
+
+``--show-response``
+    Display the full response text from server; this is useful for debugging
+    PyPI problems.
+
+``--repository=URL, -r URL``
+    The URL of the repository to upload to.  Defaults to
+    http://pypi.python.org/pypi (i.e., the main PyPI installation).
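+
+For example, to upload a GPG-signed source distribution using a specific key
+(the identity shown is only a placeholder)::
+
+    setup.py sdist upload --sign --identity="Package Maintainer"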
+
+.. _upload_docs:
+
+``upload_docs`` - Upload package documentation to PyPI
+======================================================
+
+PyPI now supports uploading project documentation to the dedicated URL
+http://packages.python.org/<project>/.
+
+The ``upload_docs`` command will create the necessary zip file out of a
+documentation directory and will post to the repository.
+
+Note that to upload the documentation of a project, the corresponding version
+must already be registered with PyPI, using the distutils ``register``
+command -- just like the ``upload`` command.
+
+Assuming there is an ``Example`` project with documentation in the
+subdirectory ``docs``, e.g.::
+
+  Example/
+  |-- example.py
+  |-- setup.cfg
+  |-- setup.py
+  |-- docs
+  |   |-- build
+  |   |   `-- html
+  |   |       |-- index.html
+  |   |       `-- tips_tricks.html
+  |   |-- conf.py
+  |   |-- index.txt
+  |   `-- tips_tricks.txt
+
+You can simply pass the documentation directory path to the ``upload_docs``
+command::
+
+    python setup.py upload_docs --upload-dir=docs/build/html
+
+If no ``--upload-dir`` is given, ``upload_docs`` will attempt to run the
+``build_sphinx`` command to generate uploadable documentation.
+For the command to become available, `Sphinx <http://sphinx.pocoo.org/>`_
+must be installed in the same environment as distribute.
+
+As with other ``setuptools``-based commands, you can define useful
+defaults in the ``setup.cfg`` of your Python project, e.g.:
+
+.. code-block:: ini
+
+    [upload_docs]
+    upload-dir = docs/build/html
+
+The ``upload_docs`` command has the following options:
+
+``--upload-dir``
+    The directory to be uploaded to the repository.
+
+``--show-response``
+    Display the full response text from server; this is useful for debugging
+    PyPI problems.
+
+``--repository=URL, -r URL``
+    The URL of the repository to upload to.  Defaults to
+    http://pypi.python.org/pypi (i.e., the main PyPI installation).
+
+
+--------------------------------
+Extending and Reusing Distribute
+--------------------------------
+
+Creating ``distutils`` Extensions
+=================================
+
+It can be hard to add new commands or setup arguments to the distutils.  But
+the ``setuptools`` package makes it a bit easier, by allowing you to distribute
+a distutils extension as a separate project, and then have projects that need
+the extension just refer to it in their ``setup_requires`` argument.
+
+With ``setuptools``, your distutils extension projects can hook in new
+commands and ``setup()`` arguments just by defining "entry points".  These
+are mappings from command or argument names to a specification of where to
+import a handler from.  (See the section on `Dynamic Discovery of Services and
+Plugins`_ above for some more background on entry points.)
+
+
+Adding Commands
+---------------
+
+You can add new ``setup`` commands by defining entry points in the
+``distutils.commands`` group.  For example, if you wanted to add a ``foo``
+command, you might add something like this to your distutils extension
+project's setup script::
+
+    setup(
+        # ...
+        entry_points = {
+            "distutils.commands": [
+                "foo = mypackage.some_module:foo",
+            ],
+        },
+    )
+
+(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is
+a ``setuptools.Command`` subclass.)
+
+Once a project containing such entry points has been activated on ``sys.path``
+(e.g. by running "install" or "develop" with a site-packages installation
+directory), the command(s) will be available to any ``setuptools``-based setup
+scripts.  It is not necessary to use the ``--command-packages`` option or
+to monkeypatch the ``distutils.command`` package to install your commands;
+``setuptools`` automatically adds a wrapper to the distutils to search for
+entry points in the active distributions on ``sys.path``.  In fact, this is
+how setuptools' own commands are installed: the setuptools project's setup
+script defines entry points for them!
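+
+A bare-bones sketch of what the hypothetical ``foo`` command class might look
+like (only the ``run()`` body would be specific to your extension)::
+
+    from setuptools import Command
+
+    class foo(Command):
+        """Example command added via a distutils.commands entry point"""
+
+        description = "print the name of the current distribution"
+        user_options = []   # this command takes no options of its own
+
+        def initialize_options(self):
+            pass
+
+        def finalize_options(self):
+            pass
+
+        def run(self):
+            print("Running foo for %s" % self.distribution.get_name())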
+
+
+Adding ``setup()`` Arguments
+----------------------------
+
+Sometimes, your commands may need additional arguments to the ``setup()``
+call.  You can enable this by defining entry points in the
+``distutils.setup_keywords`` group.  For example, if you wanted a ``setup()``
+argument called ``bar_baz``, you might add something like this to your
+distutils extension project's setup script::
+
+    setup(
+        # ...
+        entry_points = {
+            "distutils.commands": [
+                "foo = mypackage.some_module:foo",
+            ],
+            "distutils.setup_keywords": [
+                "bar_baz = mypackage.some_module:validate_bar_baz",
+            ],
+        },
+    )
+
+The idea here is that the entry point defines a function that will be called
+to validate the ``setup()`` argument, if it's supplied.  The ``Distribution``
+object will have the initial value of the attribute set to ``None``, and the
+validation function will only be called if the ``setup()`` call sets it to
+a non-None value.  Here's an example validation function::
+
+    def assert_bool(dist, attr, value):
+        """Verify that value is True, False, 0, or 1"""
+        if bool(value) != value:
+            raise DistutilsSetupError(
+                "%r must be a boolean value (got %r)" % (attr,value)
+            )
+
+Your function should accept three arguments: the ``Distribution`` object,
+the attribute name, and the attribute value.  It should raise a
+``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument
+is invalid.  Remember, your function will only be called with non-None values,
+and the default value of arguments defined this way is always None.  So, your
+commands should always be prepared for the possibility that the attribute will
+be ``None`` when they access it later.
+
+If more than one active distribution defines an entry point for the same
+``setup()`` argument, *all* of them will be called.  This allows multiple
+distutils extensions to define a common argument, as long as they agree on
+what values of that argument are valid.
+
+Also note that as with commands, it is not necessary to subclass or monkeypatch
+the distutils ``Distribution`` class in order to add your arguments; it is
+sufficient to define the entry points in your extension, as long as any setup
+script using your extension lists your project in its ``setup_requires``
+argument.
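+
+From the point of view of a project *using* such an extension, the
+hypothetical ``bar_baz`` keyword then simply appears in its ``setup()`` call,
+with the extension project named in ``setup_requires`` (the project name here
+is illustrative)::
+
+    setup(
+        # ...
+        setup_requires = ["my_distutils_extension"],
+        bar_baz = True,
+    )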
+
+
+Adding new EGG-INFO Files
+-------------------------
+
+Some extensible applications or frameworks may want to allow third parties to
+develop plugins with application or framework-specific metadata included in
+the plugins' EGG-INFO directory, for easy access via the ``pkg_resources``
+metadata API.  The easiest way to allow this is to create a distutils extension
+to be used from the plugin projects' setup scripts (via ``setup_requires``)
+that defines a new setup keyword, and then uses that data to write an EGG-INFO
+file when the ``egg_info`` command is run.
+
+The ``egg_info`` command looks for extension points in an ``egg_info.writers``
+group, and calls them to write the files.  Here's a simple example of a
+distutils extension defining a setup argument ``foo_bar``, which is a list of
+lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any
+project that uses the argument::
+
+    setup(
+        # ...
+        entry_points = {
+            "distutils.setup_keywords": [
+                "foo_bar = setuptools.dist:assert_string_list",
+            ],
+            "egg_info.writers": [
+                "foo_bar.txt = setuptools.command.egg_info:write_arg",
+            ],
+        },
+    )
+
+This simple example makes use of two utility functions defined by setuptools
+for its own use: a routine to validate that a setup keyword is a sequence of
+strings, and another one that looks up a setup argument and writes it to
+a file.  Here's what the writer utility looks like::
+
+    def write_arg(cmd, basename, filename):
+        argname = os.path.splitext(basename)[0]
+        value = getattr(cmd.distribution, argname, None)
+        if value is not None:
+            value = '\n'.join(value)+'\n'
+        cmd.write_or_delete_file(argname, filename, value)
+
+As you can see, an ``egg_info.writers`` entry point must be a function taking
+three arguments: an ``egg_info`` command instance, the basename of the file to
+write (e.g. ``foo_bar.txt``), and the actual full filename that should be
+written to.
+
+In general, writer functions should honor the command object's ``dry_run``
+setting when writing files, and use the ``distutils.log`` object to do any
+console output.  The easiest way to conform to this requirement is to use
+the ``cmd`` object's ``write_file()``, ``delete_file()``, and
+``write_or_delete_file()`` methods exclusively for your file operations.  See
+those methods' docstrings for more details.
+
+
+Adding Support for Other Revision Control Systems
+-------------------------------------------------
+
+If you would like to create a plugin for ``setuptools`` to find files in other
+source control systems besides CVS and Subversion, you can do so by adding an
+entry point to the ``setuptools.file_finders`` group.  The entry point should
+be a function accepting a single directory name, and should yield
+all the filenames within that directory (and any subdirectories thereof) that
+are under revision control.
+
+For example, if you were going to create a plugin for a revision control system
+called "foobar", you would write a function something like this:
+
+.. code-block:: python
+
+    def find_files_for_foobar(dirname):
+        # loop to yield paths that start with `dirname`
+
+And you would register it in a setup script using something like this::
+
+    entry_points = {
+        "setuptools.file_finders": [
+            "foobar = my_foobar_module:find_files_for_foobar"
+        ]
+    }
+
+Then, anyone who wants to use your plugin can simply install it, and their
+local setuptools installation will be able to find the necessary files.
+
+It is not necessary to distribute source control plugins with projects that
+simply use the other source control system, or to specify the plugins in
+``setup_requires``.  When you create a source distribution with the ``sdist``
+command, setuptools automatically records what files were found in the
+``SOURCES.txt`` file.  That way, recipients of source distributions don't need
+to have revision control at all.  However, if someone is working on a package
+by checking out with that system, they will need the same plugin(s) that the
+original author is using.
+
+A few important points for writing revision control file finders:
+
+* Your finder function MUST return relative paths, created by appending to the
+  passed-in directory name.  Absolute paths are NOT allowed, nor are relative
+  paths that reference a parent directory of the passed-in directory.
+
+* Your finder function MUST accept an empty string as the directory name,
+  meaning the current directory.  You MUST NOT convert this to a dot; just
+  yield relative paths.  So, yielding a subdirectory named ``some/dir`` under
+  the current directory should NOT be rendered as ``./some/dir`` or
+  ``/somewhere/some/dir``, but *always* as simply ``some/dir``.
+
+* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully
+  with the absence of needed programs (i.e., ones belonging to the revision
+  control system itself).  It *may*, however, use ``distutils.log.warn()`` to
+  inform the user of the missing program(s).
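+
+Putting these rules together, a finder for the hypothetical "foobar" system
+might look roughly like this, assuming the system keeps a plain-text manifest
+of tracked files at ``.foobar/manifest`` (the file layout and names are
+invented for illustration)::
+
+    import os
+    from distutils import log
+
+    def find_files_for_foobar(dirname=''):
+        manifest = os.path.join(dirname, '.foobar', 'manifest')
+        if not os.path.isfile(manifest):
+            return   # not a foobar checkout; yield nothing, don't raise
+        try:
+            f = open(manifest)
+        except IOError:
+            log.warn("foobar plugin: could not read %s", manifest)
+            return
+        try:
+            for line in f:
+                name = line.strip()
+                if name:
+                    # paths are always relative, built by appending to the
+                    # passed-in directory name (which may be an empty string)
+                    yield os.path.join(dirname, name)
+        finally:
+            f.close()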
+
+
+Subclassing ``Command``
+-----------------------
+
+Sorry, this section isn't written yet, and neither is a lot of what's below
+this point, except for the change log.  You might want to `subscribe to changes
+in this page <setuptools?action=subscribe>`_ to see when new documentation is
+added or updated.
+
+XXX
+
+
+Reusing ``setuptools`` Code
+===========================
+
+``distribute_setup``
+--------------------
+
+XXX
+
+
+``setuptools.archive_util``
+---------------------------
+
+XXX
+
+
+``setuptools.sandbox``
+----------------------
+
+XXX
+
+
+``setuptools.package_index``
+----------------------------
+
+XXX
+
+History
+=======
+
+0.6c9
+ * Fixed a missing files problem when using Windows source distributions on
+   non-Windows platforms, due to distutils not handling manifest file line
+   endings correctly.
+
+ * Updated Pyrex support to work with Pyrex 0.9.6 and higher.
+
+ * Minor changes for Jython compatibility, including skipping tests that can't
+   work on Jython.
+
+ * Fixed not installing eggs in ``install_requires`` if they were also used for
+   ``setup_requires`` or ``tests_require``.
+
+ * Fixed not fetching eggs in ``install_requires`` when running tests.
+
+ * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools
+   installations when called from a standalone ``setup.py``.
+
+ * Added a warning if a namespace package is declared, but its parent package
+   is not also declared as a namespace.
+
+ * Support Subversion 1.5
+
+ * Removed use of deprecated ``md5`` module if ``hashlib`` is available
+
+ * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice
+
+ * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's
+   ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``.
+
+ * Ensure that _full_name is set on all shared libs before extensions are
+   checked for shared lib usage.  (Fixes a bug in the experimental shared
+   library build support.)
+
+ * Fix to allow unpacked eggs containing native libraries to fail more
+   gracefully under Google App Engine (with an ``ImportError`` loading the
+   C-based module, instead of getting a ``NameError``).
+
+0.6c7
+ * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and
+   ``egg_info`` command failing on new, uncommitted SVN directories.
+
+ * Fix import problems with nested namespace packages installed via
+   ``--root`` or ``--single-version-externally-managed``, due to the
+   parent package not having the child package as an attribute.
+
+0.6c6
+ * Added ``--egg-path`` option to ``develop`` command, allowing you to force
+   ``.egg-link`` files to use relative paths (allowing them to be shared across
+   platforms on a networked drive).
+
+ * Fix not building binary RPMs correctly.
+
+ * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with
+   bash-compatible shells.
+
+ * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there
+   was whitespace inside a quoted argument or at the end of the ``#!`` line
+   (a regression introduced in 0.6c4).
+
+ * Fix ``test`` command possibly failing if an older version of the project
+   being tested was installed on ``sys.path`` ahead of the test source
+   directory.
+
+ * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in
+   their names as packages.
+
+0.6c5
+ * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg``
+   packages under Python versions less than 2.5.
+
+ * Fix uploaded ``bdist_wininst`` packages being described as suitable for
+   "any" version by Python 2.5, even if a ``--target-version`` was specified.
+
+0.6c4
+ * Overhauled Windows script wrapping to support ``bdist_wininst`` better.
+   Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or
+   ``#!pythonw.exe`` as the executable name (even when built on non-Windows
+   platforms!), and the wrappers will look for the executable in the script's
+   parent directory (which should find the right version of Python).
+
+ * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or
+   ``bdist_wininst`` under Python 2.3 and 2.4.
+
+ * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is
+   prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish
+   platforms.  (This is mainly so that setuptools itself can have a single-file
+   installer on Unix, without doing multiple downloads, dealing with firewalls,
+   etc.)
+
+ * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files
+
+ * Use cross-platform relative paths in ``easy-install.pth`` when doing
+   ``develop`` and the source directory is a subdirectory of the installation
+   target directory.
+
+ * Fix a problem installing eggs with a system packaging tool if the project
+   contained an implicit namespace package; for example if the ``setup()``
+   listed a namespace package ``foo.bar`` without explicitly listing ``foo``
+   as a namespace package.
+
+0.6c3
+ * Fixed breakages caused by Subversion 1.4's new "working copy" format
+
+0.6c2
+ * The ``ez_setup`` module displays the conflicting version of setuptools (and
+   its installation location) when a script requests a version that's not
+   available.
+
+ * Running ``setup.py develop`` on a setuptools-using project will now install
+   setuptools if needed, instead of only downloading the egg.
+
+0.6c1
+ * Fixed ``AttributeError`` when trying to download a ``setup_requires``
+   dependency when a distribution lacks a ``dependency_links`` setting.
+
+ * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so
+   as to play better with packaging tools that complain about zero-length
+   files.
+
+ * Made ``setup.py develop`` respect the ``--no-deps`` option, which it
+   previously was ignoring.
+
+ * Support ``extra_path`` option to ``setup()`` when ``install`` is run in
+   backward-compatibility mode.
+
+ * Source distributions now always include a ``setup.cfg`` file that explicitly
+   sets ``egg_info`` options such that they produce an identical version number
+   to the source distribution's version number.  (Previously, the default
+   version number could be different due to the use of ``--tag-date``, or if
+   the version was overridden on the command line that built the source
+   distribution.)
+
+0.6b4
+ * Fix ``register`` not obeying name/version set by ``egg_info`` command, if
+   ``egg_info`` wasn't explicitly run first on the same command line.
+
+ * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info``
+   command, to allow suppressing tags configured in ``setup.cfg``.
+
+ * Fixed redundant warnings about missing ``README`` file(s); the warning
+   should now appear only when you are actually building a source distribution.
+
+0.6b3
+ * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``.
+
+ * Allow ``.py`` files found by the ``include_package_data`` option to be
+   automatically included.  Remove duplicate data file matches if both
+   ``include_package_data`` and ``package_data`` are used to refer to the same
+   files.
+
+0.6b1
+ * Strip ``module`` from the end of compiled extension modules when computing
+   the name of a ``.py`` loader/wrapper.  (Python's import machinery ignores
+   this suffix when searching for an extension module.)
+
+0.6a11
+ * Added ``test_loader`` keyword to support custom test loaders
+
+ * Added ``setuptools.file_finders`` entry point group to allow implementing
+   revision control plugins.
+
+ * Added ``--identity`` option to ``upload`` command.
+
+ * Added ``dependency_links`` to allow specifying URLs for ``--find-links``.
+
+ * Enhanced test loader to scan packages as well as modules, and call
+   ``additional_tests()`` if present to get non-unittest tests.
+
+ * Support namespace packages in conjunction with system packagers, by omitting
+   the installation of any ``__init__.py`` files for namespace packages, and
+   adding a special ``.pth`` file to create a working package in
+   ``sys.modules``.
+
+ * Made ``--single-version-externally-managed`` automatic when ``--root`` is
+   used, so that most system packagers won't require special support for
+   setuptools.
+
+ * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or
+   other configuration files for their option defaults when installing, and
+   also made the install use ``--multi-version`` mode so that the project
+   directory doesn't need to support .pth files.
+
+ * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading
+   it.  Previously, the file could be left open and the actual error would be
+   masked by problems trying to remove the open file on Windows systems.
+
+0.6a10
+ * Fixed the ``develop`` command ignoring ``--find-links``.
+
+0.6a9
+ * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to
+   create source distributions.  ``MANIFEST.in`` is still read and processed,
+   as are the standard defaults and pruning.  But the manifest is built inside
+   the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt
+   every time the ``egg_info`` command is run.
+
+ * Added the ``include_package_data`` keyword to ``setup()``, allowing you to
+   automatically include any package data listed in revision control or
+   ``MANIFEST.in``
+
+ * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to
+   trim back files included via the ``package_data`` and
+   ``include_package_data`` options.
+
+ * Fixed ``--tag-svn-revision`` not working when run from a source
+   distribution.
+
+ * Added warning for namespace packages with missing ``declare_namespace()``
+
+ * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages
+   requiring ``nose`` to run unit tests can make this dependency optional
+   unless the ``test`` command is run.
+
+ * Made all commands that use ``easy_install`` respect its configuration
+   options, as this was causing some problems with ``setup.py install``.
+
+ * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so
+   that you can process a directory tree through a processing filter as if it
+   were a zipfile or tarfile.
+
+ * Added an internal ``install_egg_info`` command to use as part of old-style
+   ``install`` operations, that installs an ``.egg-info`` directory with the
+   package.
+
+ * Added a ``--single-version-externally-managed`` option to the ``install``
+   command so that you can more easily wrap a "flat" egg in a system package.
+
+ * Enhanced ``bdist_rpm`` so that it installs single-version eggs that
+   don't rely on a ``.pth`` file.  The ``--no-egg`` option has been removed,
+   since all RPMs are now built in a more backwards-compatible format.
+
+ * Support full roundtrip translation of eggs to and from ``bdist_wininst``
+   format.  Running ``bdist_wininst`` on a setuptools-based package wraps the
+   egg in an .exe that will safely install it as an egg (i.e., with metadata
+   and entry-point wrapper scripts), and ``easy_install`` can turn the .exe
+   back into an ``.egg`` file or directory and install it as such.
+
+
+0.6a8
+ * Fixed some problems building extensions when Pyrex was installed, especially
+   with Python 2.4 and/or packages using SWIG.
+
+ * Made ``develop`` command accept all the same options as ``easy_install``,
+   and use the ``easy_install`` command's configuration settings as defaults.
+
+ * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision
+   number from ``PKG-INFO`` in case it is being run on a source distribution of
+   a snapshot taken from a Subversion-based project.
+
+ * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being
+   installed as data, adding them to ``native_libs.txt`` automatically.
+
+ * Fixed some problems with fresh checkouts of projects that don't include
+   ``.egg-info/PKG-INFO`` under revision control and put the project's source
+   code directly in the project directory.  If such a package had any
+   requirements that get processed before the ``egg_info`` command can be run,
+   the setup scripts would fail with a "Missing 'Version:' header and/or
+   PKG-INFO file" error, because the egg runtime interpreted the unbuilt
+   metadata in a directory on ``sys.path`` (i.e. the current directory) as
+   being a corrupted egg.  Setuptools now monkeypatches the distribution
+   metadata cache to pretend that the egg has valid version information, until
+   it has a chance to make it actually be so (via the ``egg_info`` command).
+
+0.6a5
+ * Fixed missing gui/cli .exe files in distribution.  Fixed bugs in tests.
+
+0.6a3
+ * Added ``gui_scripts`` entry point group to allow installing GUI scripts
+   on Windows and other platforms.  (The special handling is only for Windows;
+   other platforms are treated the same as for ``console_scripts``.)
+
+0.6a2
+ * Added ``console_scripts`` entry point group to allow installing scripts
+   without the need to create separate script files.  On Windows, console
+   scripts get an ``.exe`` wrapper so you can just type their name.  On other
+   platforms, the scripts are written without a file extension.
+
+0.6a1
+ * Added support for building "old-style" RPMs that don't install an egg for
+   the target package, using a ``--no-egg`` option.
+
+ * The ``build_ext`` command now works better when using the ``--inplace``
+   option and multiple Python versions.  It now makes sure that all extensions
+   match the current Python version, even if newer copies were built for a
+   different Python version.
+
+ * The ``upload`` command no longer attaches an extra ``.zip`` when uploading
+   eggs, as PyPI now supports egg uploads without trickery.
+
+ * The ``ez_setup`` script/module now displays a warning before downloading
+   the setuptools egg, and attempts to check the downloaded egg against an
+   internal MD5 checksum table.
+
+ * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the
+   latest revision number; it was using the revision number of the directory
+   containing ``setup.py``, not the highest revision number in the project.
+
+ * Added ``eager_resources`` setup argument
+
+ * The ``sdist`` command now recognizes Subversion "deleted file" entries and
+   does not include them in source distributions.
+
+ * ``setuptools`` now embeds itself more thoroughly into the distutils, so that
+   other distutils extensions (e.g. py2exe, py2app) will subclass setuptools'
+   versions of things, rather than the native distutils ones.
+
+ * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``;
+   ``setup_requires`` allows you to automatically find and download packages
+   that are needed in order to *build* your project (as opposed to running it).
+
+ * ``setuptools`` now finds its commands, ``setup()`` argument validators, and
+   metadata writers using entry points, so that they can be extended by
+   third-party packages.  See `Creating distutils Extensions`_ above for more
+   details.
+
+ * The vestigial ``depends`` command has been removed.  It was never finished
+   or documented, and never would have worked without EasyInstall - which it
+   pre-dated and was never compatible with.
+
+0.5a12
+ * The zip-safety scanner now checks for modules that might be used with
+   ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't
+   handle ``-m`` on zipped modules.
+
+0.5a11
+ * Fix breakage of the "develop" command that was caused by the addition of
+   ``--always-unzip`` to the ``easy_install`` command.
+
+0.5a9
+ * Include ``svn:externals`` directories in source distributions as well as
+   normal subversion-controlled files and directories.
+
+ * Added ``exclude=patternlist`` option to ``setuptools.find_packages()``
+
+ * Changed ``--tag-svn-revision`` to include an "r" in front of the revision number
+   for better readability.
+
+ * Added ability to build eggs without including source files (except for any
+   scripts, of course), using the ``--exclude-source-files`` option to
+   ``bdist_egg``.
+
+ * ``setup.py install`` now automatically detects when an "unmanaged" package
+   or module is going to be on ``sys.path`` ahead of a package being installed,
+   thereby preventing the newer version from being imported.  If this occurs,
+   a warning message is output to ``sys.stderr``, but installation proceeds
+   anyway.  The warning message informs the user what files or directories
+   need deleting, and advises them they can also use EasyInstall (with the
+   ``--delete-conflicting`` option) to do it automatically.
+
+ * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata
+   directory that lists all top-level modules and packages in the distribution.
+   This is used by the ``easy_install`` command to find possibly-conflicting
+   "unmanaged" packages when installing the distribution.
+
+ * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``.
+   Added package analysis to determine zip-safety if the ``zip_safe`` flag
+   is not given, and advise the author regarding what code might need changing.
+
+ * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``.
+
+0.5a8
+ * The "egg_info" command now always sets the distribution metadata to "safe"
+   forms of the distribution name and version, so that distribution files will
+   be generated with parseable names (i.e., ones that don't include '-' in the
+   name or version).  Also, this means that if you use the various ``--tag``
+   options of "egg_info", any distributions generated will use the tags in the
+   version, not just egg distributions.
+
+ * Added support for defining command aliases in distutils configuration files,
+   under the "[aliases]" section.  To prevent recursion and to allow aliases to
+   call the command of the same name, a given alias can be expanded only once
+   per command-line invocation.  You can define new aliases with the "alias"
+   command, either for the local, global, or per-user configuration.
+
+ * Added "rotate" command to delete old distribution files, given a set of
+   patterns to match and the number of files to keep.  (Keeps the most
+   recently-modified distribution files matching each pattern.)
+
+ * Added "saveopts" command that saves all command-line options for the current
+   invocation to the local, global, or per-user configuration file.  Useful for
+   setting defaults without having to hand-edit a configuration file.
+
+ * Added a "setopt" command that sets a single option in a specified distutils
+   configuration file.
+
+0.5a7
+ * Added "upload" support for egg and source distributions, including a bug
+   fix for "upload" and a temporary workaround for lack of .egg support in
+   PyPI.
+
+0.5a6
+ * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it
+   will include all files under revision control (CVS or Subversion) in the
+   current directory, and it will regenerate the list every time you create a
+   source distribution, not just when you tell it to.  This should make the
+   default "do what you mean" more often than the distutils' default behavior
+   did, while still retaining the old behavior in the presence of MANIFEST.in.
+
+ * Fixed the "develop" command always updating .pth files, even if you
+   specified ``-n`` or ``--dry-run``.
+
+ * Slightly changed the format of the generated version when you use
+   ``--tag-build`` on the "egg_info" command, so that you can make tagged
+   revisions compare *lower* than the version specified in setup.py (e.g. by
+   using ``--tag-build=dev``).
+
+0.5a5
+ * Added ``develop`` command to ``setuptools``-based packages.  This command
+   installs an ``.egg-link`` pointing to the package's source directory, and
+   script wrappers that ``execfile()`` the source versions of the package's
+   scripts.  This lets you put your development checkout(s) on sys.path without
+   having to actually install them.  (To uninstall the link, use
+   ``setup.py develop --uninstall``.)
+
+ * Added ``egg_info`` command to ``setuptools``-based packages.  This command
+   just creates or updates the "projectname.egg-info" directory, without
+   building an egg.  (It's used by the ``bdist_egg``, ``test``, and ``develop``
+   commands.)
+
+ * Enhanced the ``test`` command so that it doesn't install the package, but
+   instead builds any C extensions in-place, updates the ``.egg-info``
+   metadata, adds the source directory to ``sys.path``, and runs the tests
+   directly on the source.  This avoids an "unmanaged" installation of the
+   package to ``site-packages`` or elsewhere.
+
+ * Made ``easy_install`` a standard ``setuptools`` command, moving it from
+   the ``easy_install`` module to ``setuptools.command.easy_install``.  Note
+   that if you were importing or extending it, you must now change your imports
+   accordingly.  ``easy_install.py`` is still installed as a script, but not as
+   a module.
+
+0.5a4
+ * Setup scripts using setuptools can now list their dependencies directly in
+   the setup.py file, without having to manually create a ``depends.txt`` file.
+   The ``install_requires`` and ``extras_require`` arguments to ``setup()``
+   are used to create a dependencies file automatically.  If you are manually
+   creating ``depends.txt`` right now, please switch to using these setup
+   arguments as soon as practical, because ``depends.txt`` support will be
+   removed in the 0.6 release cycle.  For documentation on the new arguments,
+   see the ``setuptools.dist.Distribution`` class.
+
+ * Setup scripts using setuptools now always install using ``easy_install``
+   internally, for ease of uninstallation and upgrading.
+
+0.5a1
+ * Added support for "self-installation" bootstrapping.  Packages can now
+   include ``ez_setup.py`` in their source distribution, and add the following
+   to their ``setup.py``, in order to automatically bootstrap installation of
+   setuptools as part of their setup process::
+
+    from ez_setup import use_setuptools
+    use_setuptools()
+
+    from setuptools import setup
+    # etc...
+
+0.4a2
+ * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools
+   installation easier, and to allow distributions using setuptools to avoid
+   having to include setuptools in their source distribution.
+
+ * All downloads are now managed by the ``PackageIndex`` class (which is now
+   subclassable and replaceable), so that embedders can more easily override
+   download logic, give download progress reports, etc.  The class has also
+   been moved to the new ``setuptools.package_index`` module.
+
+ * The ``Installer`` class no longer handles downloading, manages a temporary
+   directory, or tracks the ``zip_ok`` option.  Downloading is now handled
+   by ``PackageIndex``, and ``Installer`` has become an ``easy_install``
+   command class based on ``setuptools.Command``.
+
+ * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup
+   script in a directory sandbox, and a new ``setuptools.archive_util`` module
+   with an ``unpack_archive()`` API.  These were split out of EasyInstall to
+   allow reuse by other tools and applications.
+
+ * ``setuptools.Command`` now supports reinitializing commands using keyword
+   arguments to set/reset options.  Also, ``Command`` subclasses can now set
+   their ``command_consumes_arguments`` attribute to ``True`` in order to
+   receive an ``args`` option containing the rest of the command line.
+
+0.3a2
+ * Added new options to ``bdist_egg`` to allow tagging the egg's version number
+   with a subversion revision number, the current date, or an explicit tag
+   value.  Run ``setup.py bdist_egg --help`` to get more information.
+
+ * Misc. bug fixes
+
+0.3a1
+ * Initial release.
+
+Mailing List and Bug Tracker
+============================
+
+Please use the `distutils-sig mailing list`_ for questions and discussion about
+setuptools, and the `setuptools bug tracker`_ ONLY for issues you have
+confirmed via the list are actual bugs, and which you have reduced to a minimal
+set of steps to reproduce.
+
+.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
+.. _setuptools bug tracker: http://bugs.python.org/setuptools/
+
diff --git a/vendor/distribute-0.6.35/docs/using.txt b/vendor/distribute-0.6.35/docs/using.txt
new file mode 100644
index 0000000000000000000000000000000000000000..192f1dc234a30dc1351c7f3cb7accd10e439ae72
--- /dev/null
+++ b/vendor/distribute-0.6.35/docs/using.txt
@@ -0,0 +1,21 @@
+================================
+Using Distribute in your project
+================================
+
+To use Distribute in your project, the recommended way is to ship
+`distribute_setup.py` alongside your `setup.py` script and call
+it at the very beginning of `setup.py` like this::
+
+    from distribute_setup import use_setuptools
+    use_setuptools()
+
+Another way is to add ``Distribute`` in the ``install_requires`` option::
+
+    from setuptools import setup
+
+    setup(...
+          install_requires=['distribute']
+    )
+
+
+XXX to be finished
diff --git a/vendor/distribute-0.6.35/easy_install.py b/vendor/distribute-0.6.35/easy_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..d87e984034b6e6e9eb456ebcb2b3f420c07a48bc
--- /dev/null
+++ b/vendor/distribute-0.6.35/easy_install.py
@@ -0,0 +1,5 @@
+"""Run the EasyInstall command"""
+
+if __name__ == '__main__':
+    from setuptools.command.easy_install import main
+    main()
diff --git a/vendor/distribute-0.6.35/launcher.c b/vendor/distribute-0.6.35/launcher.c
new file mode 100644
index 0000000000000000000000000000000000000000..ea4c80b5c4e0103fb9fd9bb252463e969385972c
--- /dev/null
+++ b/vendor/distribute-0.6.35/launcher.c
@@ -0,0 +1,327 @@
+/*  Setuptools Script Launcher for Windows
+
+    This is a stub executable for Windows that functions somewhat like
+    Effbot's "exemaker", in that it runs a script with the same name but
+    a .py extension, using information from a #! line.  It differs in that
+    it spawns the actual Python executable, rather than attempting to
+    hook into the Python DLL.  This means that the script will run with
+    sys.executable set to the Python executable, where exemaker ends up with
+    sys.executable pointing to itself.  (Which means it won't work if you try
+    to run another Python process using sys.executable.)
+
+    To build/rebuild with mingw32, do this in the setuptools project directory:
+
+       gcc -DGUI=0           -mno-cygwin -O -s -o setuptools/cli.exe launcher.c
+       gcc -DGUI=1 -mwindows -mno-cygwin -O -s -o setuptools/gui.exe launcher.c
+
+    It links to msvcrt.dll, but this shouldn't be a problem since it doesn't
+    actually run Python in the same process.  Note that using 'exec' instead
+    of 'spawn' doesn't work, because on Windows this leads to the Python
+    executable running in the *background*, attached to the same console
+    window, meaning you get a command prompt back *before* Python even finishes
+    starting.  So, we have to use spawnv() and wait for Python to exit before
+    continuing.  :(
+*/
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <windows.h>
+#include <tchar.h>
+#include <fcntl.h>
+
+int child_pid=0;
+
+int fail(char *format, char *data) {
+    /* Print error message to stderr and return 2 */
+    fprintf(stderr, format, data);
+    return 2;
+}
+
+char *quoted(char *data) {
+    int i, ln = strlen(data), nb;
+
+    /* We allocate twice as much space as needed to deal with the worst case
+       of having to escape everything. */
+    char *result = calloc(ln*2+3, sizeof(char));
+    char *presult = result;
+
+    *presult++ = '"';
+    for (nb=0, i=0; i < ln; i++)
+      {
+        if (data[i] == '\\')
+          nb += 1;
+        else if (data[i] == '"')
+          {
+            for (; nb > 0; nb--)
+              *presult++ = '\\';
+            *presult++ = '\\';
+          }
+        else
+          nb = 0;
+        *presult++ = data[i];
+      }
+
+    for (; nb > 0; nb--)        /* Deal w trailing slashes */
+      *presult++ = '\\';
+
+    *presult++ = '"';
+    *presult++ = 0;
+    return result;
+}
+
+
+
+
+
+
+
+
+
+
+char *loadable_exe(char *exename) {
+    /* HINSTANCE hPython;  DLL handle for python executable */
+    char *result;
+
+    /* hPython = LoadLibraryEx(exename, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
+    if (!hPython) return NULL; */
+
+    /* Return the absolute filename for spawnv */
+    result = calloc(MAX_PATH, sizeof(char));
+    strncpy(result, exename, MAX_PATH);
+    /*if (result) GetModuleFileNameA(hPython, result, MAX_PATH);
+
+    FreeLibrary(hPython); */
+    return result;
+}
+
+
+char *find_exe(char *exename, char *script) {
+    char drive[_MAX_DRIVE], dir[_MAX_DIR], fname[_MAX_FNAME], ext[_MAX_EXT];
+    char path[_MAX_PATH], c, *result;
+
+    /* convert slashes to backslashes for uniform search below */
+    result = exename;
+    while (c = *result++) if (c=='/') result[-1] = '\\';
+
+    _splitpath(exename, drive, dir, fname, ext);
+    if (drive[0] || dir[0]=='\\') {
+        return loadable_exe(exename);   /* absolute path, use directly */
+    }
+    /* Use the script's parent directory, which should be the Python home
+       (This should only be used for bdist_wininst-installed scripts, because
+        easy_install-ed scripts use the absolute path to python[w].exe
+    */
+    _splitpath(script, drive, dir, fname, ext);
+    result = dir + strlen(dir) -1;
+    if (*result == '\\') result--;
+    while (*result != '\\' && result>=dir) *result-- = 0;
+    _makepath(path, drive, dir, exename, NULL);
+    return loadable_exe(path);
+}
+
+
+char **parse_argv(char *cmdline, int *argc)
+{
+    /* Parse a command line in-place using MS C rules */
+
+    char **result = calloc(strlen(cmdline), sizeof(char *));
+    char *output = cmdline;
+    char c;
+    int nb = 0;
+    int iq = 0;
+    *argc = 0;
+
+    result[0] = output;
+    while (isspace(*cmdline)) cmdline++;   /* skip leading spaces */
+
+    do {
+        c = *cmdline++;
+        if (!c || (isspace(c) && !iq)) {
+            while (nb) {*output++ = '\\'; nb--; }
+            *output++ = 0;
+            result[++*argc] = output;
+            if (!c) return result;
+            while (isspace(*cmdline)) cmdline++;  /* skip leading spaces */
+            if (!*cmdline) return result;  /* avoid empty arg if trailing ws */
+            continue;
+        }
+        if (c == '\\')
+            ++nb;   /* count \'s */
+        else {
+            if (c == '"') {
+                if (!(nb & 1)) { iq = !iq; c = 0; }  /* skip " unless odd # of \ */
+                nb = nb >> 1;   /* cut \'s in half */
+            }
+            while (nb) {*output++ = '\\'; nb--; }
+            if (c) *output++ = c;
+        }
+    } while (1);
+}
+
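+/* Illustrative examples of the MS C rules implemented above (derived from
+   reading the code rather than from upstream documentation):
+
+       a "b c" d     ->   argv: a | b c | d     (quotes group spaces)
+       x\"y          ->   argv: x"y             (backslash escapes the quote)
+
+   Parsing happens in place: the returned pointers refer into cmdline. */
+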
+void pass_control_to_child(DWORD control_type) {
+    /*
+     * distribute-issue207
+     * passes the control event to child process (Python)
+     */
+    if (!child_pid) {
+        return;
+    }
+    GenerateConsoleCtrlEvent(child_pid,0);
+}
+
+BOOL control_handler(DWORD control_type) {
+    /* 
+     * distribute-issue207
+     * control event handler callback function
+     */
+    switch (control_type) {
+        case CTRL_C_EVENT:
+            pass_control_to_child(0);
+            break;
+    }
+    return TRUE;
+}
+
+int create_and_wait_for_subprocess(char* command) {
+    /*
+     * distribute-issue207
+     * launches child process (Python)
+     */
+    DWORD return_value = 0;
+    LPSTR commandline = command;
+    STARTUPINFOA s_info;
+    PROCESS_INFORMATION p_info;
+    ZeroMemory(&p_info, sizeof(p_info));
+    ZeroMemory(&s_info, sizeof(s_info));
+    s_info.cb = sizeof(STARTUPINFO);
+    // set up the control handler callback function
+    SetConsoleCtrlHandler((PHANDLER_ROUTINE) control_handler, TRUE);
+    if (!CreateProcessA(NULL, commandline, NULL, NULL, TRUE, 0, NULL, NULL, &s_info, &p_info)) {
+        fprintf(stderr, "failed to create process.\n");
+        return 0;
+    }   
+    child_pid = p_info.dwProcessId;
+    // wait for Python to exit
+    WaitForSingleObject(p_info.hProcess, INFINITE);
+    if (!GetExitCodeProcess(p_info.hProcess, &return_value)) {
+        fprintf(stderr, "failed to get exit code from process.\n");
+        return 0;
+    }
+    return return_value;
+}
+
+char* join_executable_and_args(char *executable, char **args, int argc)
+{
+    /*
+     * distribute-issue207
+     * CreateProcess needs the executable and its command-line arguments as a
+     * single string, so build that string from the argument array
+     */
+    int len,counter;
+    char* cmdline;
+    
+    len=strlen(executable)+2;
+    for (counter=1; counter<argc; counter++) {
+        len+=strlen(args[counter])+1;
+    }
+
+    cmdline = (char*)calloc(len, sizeof(char));
+    sprintf(cmdline, "%s", executable);
+    len=strlen(executable);
+    for (counter=1; counter<argc; counter++) {
+        sprintf(cmdline+len, " %s", args[counter]);
+        len+=strlen(args[counter])+1;
+    }
+    return cmdline;
+}
+
+int run(int argc, char **argv, int is_gui) {
+
+    char python[256];   /* python executable's filename*/
+    char *pyopt;        /* Python option */
+    char script[256];   /* the script's filename */
+
+    int scriptf;        /* file descriptor for script file */
+
+    char **newargs, **newargsp, **parsedargs; /* argument array for exec */
+    char *ptr, *end;    /* working pointers for string manipulation */
+    char *cmdline;
+    int i, parsedargc;              /* loop counter */
+
+    /* compute script name from our .exe name*/
+    GetModuleFileNameA(NULL, script, sizeof(script));
+    end = script + strlen(script);
+    while( end>script && *end != '.')
+        *end-- = '\0';
+    *end-- = '\0';
+    strcat(script, (GUI ? "-script.pyw" : "-script.py"));
+
+    /* figure out the target python executable */
+
+    scriptf = open(script, O_RDONLY);
+    if (scriptf == -1) {
+        return fail("Cannot open %s\n", script);
+    }
+    end = python + read(scriptf, python, sizeof(python));
+    close(scriptf);
+
+    ptr = python-1;
+    while(++ptr < end && *ptr && *ptr!='\n' && *ptr!='\r') {;}
+
+    *ptr-- = '\0';
+
+    if (strncmp(python, "#!", 2)) {
+        /* default to python.exe if no #! header */
+        strcpy(python, "#!python.exe");
+    }
+
+    parsedargs = parse_argv(python+2, &parsedargc);
+
+    /* Using spawnv() can fail strangely if you e.g. find the Cygwin
+       Python, so we'll make sure Windows can find and load it */
+
+    ptr = find_exe(parsedargs[0], script);
+    if (!ptr) {
+        return fail("Cannot find Python executable %s\n", parsedargs[0]);
+    }
+
+    /* printf("Python executable: %s\n", ptr); */
+
+    /* Argument array needs to be
+       parsedargc + argc, plus 1 for null sentinel */
+
+    newargs = (char **)calloc(parsedargc + argc + 1, sizeof(char *));
+    newargsp = newargs;
+
+    *newargsp++ = quoted(ptr);
+    for (i = 1; i<parsedargc; i++) *newargsp++ = quoted(parsedargs[i]);
+
+    *newargsp++ = quoted(script);
+    for (i = 1; i < argc; i++)     *newargsp++ = quoted(argv[i]);
+
+    *newargsp++ = NULL;
+
+    /* printf("args 0: %s\nargs 1: %s\n", newargs[0], newargs[1]); */
+
+    if (is_gui) {
+        /* Use exec; we don't need to wait for the GUI to finish */
+        execv(ptr, (const char * const *)(newargs));
+        return fail("Could not exec %s", ptr);   /* shouldn't get here! */
+    }
+
+    /*
+     * distribute-issue207: using CreateProcessA instead of spawnv
+     */
+    cmdline = join_executable_and_args(ptr, newargs, parsedargc + argc);
+    return create_and_wait_for_subprocess(cmdline);
+}
+
+int WINAPI WinMain(HINSTANCE hI, HINSTANCE hP, LPSTR lpCmd, int nShow) {
+    return run(__argc, __argv, GUI);
+}
+
+int main(int argc, char** argv) {
+    return run(argc, argv, GUI);
+}
+
diff --git a/vendor/distribute-0.6.35/pkg_resources.py b/vendor/distribute-0.6.35/pkg_resources.py
new file mode 100644
index 0000000000000000000000000000000000000000..69601480d3ef4fdfe9d573fd00d802fc918ff147
--- /dev/null
+++ b/vendor/distribute-0.6.35/pkg_resources.py
@@ -0,0 +1,2827 @@
+"""Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof.  The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is.  Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files.  It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+import sys, os, zipimport, time, re, imp, types
+from urlparse import urlparse, urlunparse
+
+try:
+    frozenset
+except NameError:
+    from sets import ImmutableSet as frozenset
+
+# capture these to bypass sandboxing
+from os import utime
+try:
+    from os import mkdir, rename, unlink
+    WRITE_SUPPORT = True
+except ImportError:
+    # no write support, probably under GAE
+    WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+# Avoid try/except due to potential problems with delayed import mechanisms.
+if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
+    import importlib._bootstrap as importlib_bootstrap
+else:
+    importlib_bootstrap = None
+
+# This marker is used to simplify the process that checks whether the
+# setuptools package was installed by the Setuptools project
+# or by the Distribute project, in case Setuptools creates
+# a distribution with the same version.
+#
+# The bootstrapping script, for instance, will check whether this
+# attribute is present to decide whether to reinstall the package.
+_distribute = True
+
+def _bypass_ensure_directory(name, mode=0777):
+    # Sandbox-bypassing version of ensure_directory()
+    if not WRITE_SUPPORT:
+        raise IOError('"os.mkdir" not supported on this platform.')
+    dirname, filename = split(name)
+    if dirname and filename and not isdir(dirname):
+        _bypass_ensure_directory(dirname)
+        mkdir(dirname, mode)
+
+
+_state_vars = {}
+
+def _declare_state(vartype, **kw):
+    g = globals()
+    for name, val in kw.iteritems():
+        g[name] = val
+        _state_vars[name] = vartype
+
+def __getstate__():
+    state = {}
+    g = globals()
+    for k, v in _state_vars.iteritems():
+        state[k] = g['_sget_'+v](g[k])
+    return state
+
+def __setstate__(state):
+    g = globals()
+    for k, v in state.iteritems():
+        g['_sset_'+_state_vars[k]](k, g[k], v)
+    return state
+
+def _sget_dict(val):
+    return val.copy()
+
+def _sset_dict(key, ob, state):
+    ob.clear()
+    ob.update(state)
+
+def _sget_object(val):
+    return val.__getstate__()
+
+def _sset_object(key, ob, state):
+    ob.__setstate__(state)
+
+_sget_none = _sset_none = lambda *args: None
+
+
+
+def get_supported_platform():
+    """Return this platform's maximum compatible version.
+
+    distutils.util.get_platform() normally reports the minimum version
+    of Mac OS X that would be required to *use* extensions produced by
+    distutils.  But what we want when checking compatibility is to know the
+    version of Mac OS X that we are *running*.  To allow usage of packages that
+    explicitly require a newer version of Mac OS X, we must also know the
+    current version of the OS.
+
+    If this condition occurs for any other platform with a version in its
+    platform strings, this function should be extended accordingly.
+    """
+    plat = get_build_platform()
+    m = macosVersionString.match(plat)
+    if m is not None and sys.platform == "darwin":
+        try:
+            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+        except ValueError:
+            pass    # not Mac OS X
+    return plat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+__all__ = [
+    # Basic resource access and distribution/entry point discovery
+    'require', 'run_script', 'get_provider',  'get_distribution',
+    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
+    'resource_string', 'resource_stream', 'resource_filename',
+    'resource_listdir', 'resource_exists', 'resource_isdir',
+
+    # Environmental control
+    'declare_namespace', 'working_set', 'add_activation_listener',
+    'find_distributions', 'set_extraction_path', 'cleanup_resources',
+    'get_default_cache',
+
+    # Primary implementation classes
+    'Environment', 'WorkingSet', 'ResourceManager',
+    'Distribution', 'Requirement', 'EntryPoint',
+
+    # Exceptions
+    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
+    'ExtractionError',
+
+    # Parsing functions and string utilities
+    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+    'safe_extra', 'to_filename',
+
+    # filesystem utilities
+    'ensure_directory', 'normalize_path',
+
+    # Distribution "precedence" constants
+    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+    # "Provider" interfaces, implementations, and registration/lookup APIs
+    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+    'register_finder', 'register_namespace_handler', 'register_loader_type',
+    'fixup_namespace_packages', 'get_importer',
+
+    # Deprecated/backward compatibility only
+    'run_main', 'AvailableDistributions',
+]
+class ResolutionError(Exception):
+    """Abstract base for dependency resolution errors"""
+    def __repr__(self):
+        return self.__class__.__name__+repr(self.args)
+
+class VersionConflict(ResolutionError):
+    """An already-installed version conflicts with the requested version"""
+
+class DistributionNotFound(ResolutionError):
+    """A requested distribution was not found"""
+
+class UnknownExtra(ResolutionError):
+    """Distribution doesn't have an "extra feature" of the given name"""
+_provider_factories = {}
+
+PY_MAJOR = sys.version[:3]
+EGG_DIST    = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+def register_loader_type(loader_type, provider_factory):
+    """Register `provider_factory` to make providers for `loader_type`
+
+    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+    and `provider_factory` is a function that, passed a *module* object,
+    returns an ``IResourceProvider`` for that module.
+    """
+    _provider_factories[loader_type] = provider_factory
+
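+# A minimal sketch of registering a provider for a custom loader type
+# (illustrative; ``MyLoader``, ``MyLoaderProvider`` and ``my_loader_has`` are
+# hypothetical names):
+#
+#     class MyLoaderProvider(NullProvider):
+#         def _has(self, path):
+#             return my_loader_has(path)    # hypothetical helper
+#
+#     register_loader_type(MyLoader, MyLoaderProvider)
+#
+# ``NullProvider`` (defined further below) is the usual base class for such
+# providers; the factory is called with the module object and must return an
+# ``IResourceProvider``.
+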
+def get_provider(moduleOrReq):
+    """Return an IResourceProvider for the named module or requirement"""
+    if isinstance(moduleOrReq,Requirement):
+        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+    try:
+        module = sys.modules[moduleOrReq]
+    except KeyError:
+        __import__(moduleOrReq)
+        module = sys.modules[moduleOrReq]
+    loader = getattr(module, '__loader__', None)
+    return _find_adapter(_provider_factories, loader)(module)
+
+def _macosx_vers(_cache=[]):
+    if not _cache:
+        import platform
+        version = platform.mac_ver()[0]
+        # fallback for MacPorts
+        if version == '':
+            import plistlib
+            plist = '/System/Library/CoreServices/SystemVersion.plist'
+            if os.path.exists(plist):
+                if hasattr(plistlib, 'readPlist'):
+                    plist_content = plistlib.readPlist(plist)
+                    if 'ProductVersion' in plist_content:
+                        version = plist_content['ProductVersion']
+
+        _cache.append(version.split('.'))
+    return _cache[0]
+
+def _macosx_arch(machine):
+    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
+
+def get_build_platform():
+    """Return this platform's string for platform-specific distributions
+
+    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+    needs some hacks for Linux and Mac OS X.
+    """
+    try:
+        from distutils.util import get_platform
+    except ImportError:
+        from sysconfig import get_platform
+
+    plat = get_platform()
+    if sys.platform == "darwin" and not plat.startswith('macosx-'):
+        try:
+            version = _macosx_vers()
+            machine = os.uname()[4].replace(" ", "_")
+            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
+                _macosx_arch(machine))
+        except ValueError:
+            # if someone is running a non-Mac darwin system, this will fall
+            # through to the default implementation
+            pass
+    return plat
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+get_platform = get_build_platform   # XXX backward compat
+
+def compatible_platforms(provided,required):
+    """Can code for the `provided` platform run on the `required` platform?
+
+    Returns true if either platform is ``None``, or the platforms are equal.
+
+    XXX Needs compatibility checks for Linux and other unixy OSes.
+    """
+    if provided is None or required is None or provided==required:
+        return True     # easy case
+
+    # Mac OS X special cases
+    reqMac = macosVersionString.match(required)
+    if reqMac:
+        provMac = macosVersionString.match(provided)
+
+        # is this a Mac package?
+        if not provMac:
+            # this is backwards compatibility for packages built before
+            # setuptools 0.6. All packages built after this point will
+            # use the new macosx designation.
+            provDarwin = darwinVersionString.match(provided)
+            if provDarwin:
+                dversion = int(provDarwin.group(1))
+                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                if dversion == 7 and macosversion >= "10.3" or \
+                    dversion == 8 and macosversion >= "10.4":
+
+                    #import warnings
+                    #warnings.warn("Mac eggs should be rebuilt to "
+                    #    "use the macosx designation instead of darwin.",
+                    #    category=DeprecationWarning)
+                    return True
+            return False    # egg isn't macosx or legacy darwin
+
+        # are they the same major version and machine type?
+        if provMac.group(1) != reqMac.group(1) or \
+            provMac.group(3) != reqMac.group(3):
+            return False
+
+
+
+        # is the required OS major update >= the provided one?
+        if int(provMac.group(2)) > int(reqMac.group(2)):
+            return False
+
+        return True
+
+    # XXX Linux and other platforms' special cases should go here
+    return False
+
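+# Examples (illustrative):
+#
+#     compatible_platforms('macosx-10.3-fat', 'macosx-10.5-fat')   # True
+#     compatible_platforms('macosx-10.5-fat', 'macosx-10.3-fat')   # False: needs newer OS
+#     compatible_platforms('win32', 'linux-x86_64')                # False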
+
+def run_script(dist_spec, script_name):
+    """Locate distribution `dist_spec` and run its `script_name` script"""
+    ns = sys._getframe(1).f_globals
+    name = ns['__name__']
+    ns.clear()
+    ns['__name__'] = name
+    require(dist_spec)[0].run_script(script_name, ns)
+
+run_main = run_script   # backward compatibility
+
+def get_distribution(dist):
+    """Return a current distribution object for a Requirement or string"""
+    if isinstance(dist,basestring): dist = Requirement.parse(dist)
+    if isinstance(dist,Requirement): dist = get_provider(dist)
+    if not isinstance(dist,Distribution):
+        raise TypeError("Expected string, Requirement, or Distribution", dist)
+    return dist
+
+def load_entry_point(dist, group, name):
+    """Return `name` entry point of `group` for `dist` or raise ImportError"""
+    return get_distribution(dist).load_entry_point(group, name)
+
+def get_entry_map(dist, group=None):
+    """Return the entry point map for `group`, or the full entry map"""
+    return get_distribution(dist).get_entry_map(group)
+
+def get_entry_info(dist, group, name):
+    """Return the EntryPoint object for `group`+`name`, or ``None``"""
+    return get_distribution(dist).get_entry_info(group, name)
+
+
+class IMetadataProvider:
+
+    def has_metadata(name):
+        """Does the package's distribution contain the named metadata?"""
+
+    def get_metadata(name):
+        """The named metadata resource as a string"""
+
+    def get_metadata_lines(name):
+        """Yield named metadata resource as list of non-blank non-comment lines
+
+       Leading and trailing whitespace is stripped from each line, and lines
+       with ``#`` as the first non-blank character are omitted."""
+
+    def metadata_isdir(name):
+        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+
+    def metadata_listdir(name):
+        """List of metadata names in the directory (like ``os.listdir()``)"""
+
+    def run_script(script_name, namespace):
+        """Execute the named script in the supplied namespace dictionary"""
+
+
+
+
+
+
+
+
+
+
+class IResourceProvider(IMetadataProvider):
+    """An object that provides access to package resources"""
+
+    def get_resource_filename(manager, resource_name):
+        """Return a true filesystem path for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_stream(manager, resource_name):
+        """Return a readable file-like object for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_string(manager, resource_name):
+        """Return a string containing the contents of `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def has_resource(resource_name):
+        """Does the package contain the named resource?"""
+
+    def resource_isdir(resource_name):
+        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+
+    def resource_listdir(resource_name):
+        """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class WorkingSet(object):
+    """A collection of active distributions on sys.path (or a similar list)"""
+
+    def __init__(self, entries=None):
+        """Create working set from list of path entries (default=sys.path)"""
+        self.entries = []
+        self.entry_keys = {}
+        self.by_key = {}
+        self.callbacks = []
+
+        if entries is None:
+            entries = sys.path
+
+        for entry in entries:
+            self.add_entry(entry)
+
+
+    def add_entry(self, entry):
+        """Add a path item to ``.entries``, finding any distributions on it
+
+        ``find_distributions(entry,True)`` is used to find distributions
+        corresponding to the path entry, and they are added.  `entry` is
+        always appended to ``.entries``, even if it is already present.
+        (This is because ``sys.path`` can contain the same value more than
+        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+        equal ``sys.path``.)
+        """
+        self.entry_keys.setdefault(entry, [])
+        self.entries.append(entry)
+        for dist in find_distributions(entry, True):
+            self.add(dist, entry, False)
+
+
+    def __contains__(self,dist):
+        """True if `dist` is the active distribution for its project"""
+        return self.by_key.get(dist.key) == dist
+
+
+
+
+
+    def find(self, req):
+        """Find a distribution matching requirement `req`
+
+        If there is an active distribution for the requested project, this
+        returns it as long as it meets the version requirement specified by
+        `req`.  But, if there is an active distribution for the project and it
+        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+        If there is no active distribution for the requested project, ``None``
+        is returned.
+        """
+        dist = self.by_key.get(req.key)
+        if dist is not None and dist not in req:
+            raise VersionConflict(dist,req)     # XXX add more info
+        else:
+            return dist
+
+    def iter_entry_points(self, group, name=None):
+        """Yield entry point objects from `group` matching `name`
+
+        If `name` is None, yields all entry points in `group` from all
+        distributions in the working set, otherwise only ones matching
+        both `group` and `name` are yielded (in distribution order).
+        """
+        for dist in self:
+            entries = dist.get_entry_map(group)
+            if name is None:
+                for ep in entries.values():
+                    yield ep
+            elif name in entries:
+                yield entries[name]
+
+    def run_script(self, requires, script_name):
+        """Locate distribution for `requires` and run `script_name` script"""
+        ns = sys._getframe(1).f_globals
+        name = ns['__name__']
+        ns.clear()
+        ns['__name__'] = name
+        self.require(requires)[0].run_script(script_name, ns)
+
+
+
+    def __iter__(self):
+        """Yield distributions for non-duplicate projects in the working set
+
+        The yield order is the order in which the items' path entries were
+        added to the working set.
+        """
+        seen = {}
+        for item in self.entries:
+            if item not in self.entry_keys:
+                # workaround a cache issue
+                continue
+
+            for key in self.entry_keys[item]:
+                if key not in seen:
+                    seen[key]=1
+                    yield self.by_key[key]
+
+    def add(self, dist, entry=None, insert=True):
+        """Add `dist` to working set, associated with `entry`
+
+        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+        On exit from this routine, `entry` is added to the end of the working
+        set's ``.entries`` (if it wasn't already present).
+
+        `dist` is only added to the working set if it's for a project that
+        doesn't already have a distribution in the set.  If it's added, any
+        callbacks registered with the ``subscribe()`` method will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry,[])
+        keys2 = self.entry_keys.setdefault(dist.location,[])
+        if dist.key in self.by_key:
+            return      # ignore hidden distros
+
+        self.by_key[dist.key] = dist
+        if dist.key not in keys:
+            keys.append(dist.key)
+        if dist.key not in keys2:
+            keys2.append(dist.key)
+        self._added_new(dist)
+
+    def resolve(self, requirements, env=None, installer=None, replacement=True):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+        """
+
+        requirements = list(requirements)[::-1]  # set up the stack
+        processed = {}  # set of processed requirements
+        best = {}  # key -> dist
+        to_activate = []
+
+        while requirements:
+            req = requirements.pop(0)   # process dependencies breadth-first
+            if _override_setuptools(req) and replacement:
+                req = Requirement.parse('distribute')
+
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+            dist = best.get(req.key)
+            if dist is None:
+                # Find the best distribution and add it to the map
+                dist = self.by_key.get(req.key)
+                if dist is None:
+                    if env is None:
+                        env = Environment(self.entries)
+                    dist = best[req.key] = env.best_match(req, self, installer)
+                    if dist is None:
+                        #msg = ("The '%s' distribution was not found on this "
+                        #       "system, and is required by this application.")
+                        #raise DistributionNotFound(msg % req)
+
+                        # unfortunately, zc.buildout uses a str(err)
+                        # to get the name of the distribution here.
+                        raise DistributionNotFound(req)
+                to_activate.append(dist)
+            if dist not in req:
+                # Oops, the "best" so far conflicts with a dependency
+                raise VersionConflict(dist,req) # XXX put more info here
+            requirements.extend(dist.requires(req.extras)[::-1])
+            processed[req] = True
+
+        return to_activate    # return list of distros to activate
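+
+    # Illustrative use of resolve() (the requirement string is hypothetical):
+    #
+    #     needed = working_set.resolve(parse_requirements("SomeProject>=1.0"))
+    #     for dist in needed:
+    #         working_set.add(dist)
+    #
+    # which is essentially what require() does further below.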
+
+    def find_plugins(self,
+        plugin_env, full_env=None, installer=None, fallback=True
+    ):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            map(working_set.add, distributions)  # add plugins+libs to sys.path
+            print 'Could not load', errors        # display errors
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories. The `full_env`, if supplied, should be an ``Environment``
+        that contains all currently-available distributions.  If `full_env` is not
+        supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method. The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+
+        This method returns a 2-tuple: (`distributions`, `error_info`), where
+        `distributions` is a list of the distributions found in `plugin_env`
+        that were loadable, along with any other distributions that are needed
+        to resolve their dependencies.  `error_info` is a dictionary mapping
+        unloadable plugin distributions to an exception instance describing the
+        error that occurred. Usually this will be a ``DistributionNotFound`` or
+        ``VersionConflict`` instance.
+        """
+
+        plugin_projects = list(plugin_env)
+        plugin_projects.sort()  # scan project names in alphabetic order
+
+        error_info = {}
+        distributions = {}
+
+        if full_env is None:
+            env = Environment(self.entries)
+            env += plugin_env
+        else:
+            env = full_env + plugin_env
+
+        shadow_set = self.__class__([])
+        map(shadow_set.add, self)   # put all our entries in shadow_set
+
+        for project_name in plugin_projects:
+
+            for dist in plugin_env[project_name]:
+
+                req = [dist.as_requirement()]
+
+                try:
+                    resolvees = shadow_set.resolve(req, env, installer)
+
+                except ResolutionError,v:
+                    error_info[dist] = v    # save error info
+                    if fallback:
+                        continue    # try the next older version of project
+                    else:
+                        break       # give up on this project, keep going
+
+                else:
+                    map(shadow_set.add, resolvees)
+                    distributions.update(dict.fromkeys(resolvees))
+
+                    # success, no need to try any more versions of this project
+                    break
+
+        distributions = list(distributions)
+        distributions.sort()
+
+        return distributions, error_info
+
+
+
+
+
+    def require(self, *requirements):
+        """Ensure that distributions matching `requirements` are activated
+
+        `requirements` must be a string or a (possibly-nested) sequence
+        thereof, specifying the distributions and versions required.  The
+        return value is a sequence of the distributions that needed to be
+        activated to fulfill the requirements; all relevant distributions are
+        included, even if they were already activated in this working set.
+        """
+
+        needed = self.resolve(parse_requirements(requirements))
+
+        for dist in needed:
+            self.add(dist)
+
+        return needed
+
+
+    def subscribe(self, callback):
+        """Invoke `callback` for all distributions (including existing ones)"""
+        if callback in self.callbacks:
+            return
+        self.callbacks.append(callback)
+        for dist in self:
+            callback(dist)
+
+
+    def _added_new(self, dist):
+        for callback in self.callbacks:
+            callback(dist)
+
+    def __getstate__(self):
+        return (self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
+                self.callbacks[:])
+
+    def __setstate__(self, (entries, keys, by_key, callbacks)):
+        self.entries = entries[:]
+        self.entry_keys = keys.copy()
+        self.by_key = by_key.copy()
+        self.callbacks = callbacks[:]
+
+
+
+
+class Environment(object):
+    """Searchable snapshot of distributions on a search path"""
+
+    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
+        """Snapshot distributions available on a search path
+
+        Any distributions found on `search_path` are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.
+
+        `platform` is an optional string specifying the name of the platform
+        that platform-specific distributions must be compatible with.  If
+        unspecified, it defaults to the current platform.  `python` is an
+        optional string naming the desired version of Python (e.g. ``'2.4'``);
+        it defaults to the current version.
+
+        You may explicitly set `platform` (and/or `python`) to ``None`` if you
+        wish to map *all* distributions, not just those compatible with the
+        running platform or Python version.
+        """
+        self._distmap = {}
+        self._cache = {}
+        self.platform = platform
+        self.python = python
+        self.scan(search_path)
+
+    def can_add(self, dist):
+        """Is distribution `dist` acceptable for this environment?
+
+        The distribution must match the platform and python version
+        requirements specified when this environment was created, or False
+        is returned.
+        """
+        return (self.python is None or dist.py_version is None
+            or dist.py_version==self.python) \
+           and compatible_platforms(dist.platform,self.platform)
+
+    def remove(self, dist):
+        """Remove `dist` from the environment"""
+        self._distmap[dist.key].remove(dist)
+
+    def scan(self, search_path=None):
+        """Scan `search_path` for distributions usable in this environment
+
+        Any distributions found are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.  Only distributions conforming to
+        the platform/python version defined at initialization are added.
+        """
+        if search_path is None:
+            search_path = sys.path
+
+        for item in search_path:
+            for dist in find_distributions(item):
+                self.add(dist)
+
+    def __getitem__(self,project_name):
+        """Return a newest-to-oldest list of distributions for `project_name`
+        """
+        try:
+            return self._cache[project_name]
+        except KeyError:
+            project_name = project_name.lower()
+            if project_name not in self._distmap:
+                return []
+
+        if project_name not in self._cache:
+            dists = self._cache[project_name] = self._distmap[project_name]
+            _sort_dists(dists)
+
+        return self._cache[project_name]
+
+    def add(self,dist):
+        """Add `dist` if we ``can_add()`` it and it isn't already added"""
+        if self.can_add(dist) and dist.has_version():
+            dists = self._distmap.setdefault(dist.key,[])
+            if dist not in dists:
+                dists.append(dist)
+                if dist.key in self._cache:
+                    _sort_dists(self._cache[dist.key])
+
+
+    def best_match(self, req, working_set, installer=None):
+        """Find distribution best matching `req` and usable on `working_set`
+
+        This calls the ``find(req)`` method of the `working_set` to see if a
+        suitable distribution is already active.  (This may raise
+        ``VersionConflict`` if an unsuitable version of the project is already
+        active in the specified `working_set`.)  If a suitable distribution
+        isn't active, this method returns the newest distribution in the
+        environment that meets the ``Requirement`` in `req`.  If no suitable
+        distribution is found, and `installer` is supplied, then the result of
+        calling the environment's ``obtain(req, installer)`` method will be
+        returned.
+        """
+        dist = working_set.find(req)
+        if dist is not None:
+            return dist
+        for dist in self[req.key]:
+            if dist in req:
+                return dist
+        return self.obtain(req, installer) # try and download/install
+
+    def obtain(self, requirement, installer=None):
+        """Obtain a distribution matching `requirement` (e.g. via download)
+
+        Obtain a distro that matches requirement (e.g. via download).  In the
+        base ``Environment`` class, this routine just returns
+        ``installer(requirement)``, unless `installer` is None, in which case
+        None is returned instead.  This method is a hook that allows subclasses
+        to attempt other ways of obtaining a distribution before falling back
+        to the `installer` argument."""
+        if installer is not None:
+            return installer(requirement)
+
+    def __iter__(self):
+        """Yield the unique project names of the available distributions"""
+        for key in self._distmap.keys():
+            if self[key]: yield key
+
+
+
+
+    def __iadd__(self, other):
+        """In-place addition of a distribution or environment"""
+        if isinstance(other,Distribution):
+            self.add(other)
+        elif isinstance(other,Environment):
+            for project in other:
+                for dist in other[project]:
+                    self.add(dist)
+        else:
+            raise TypeError("Can't add %r to environment" % (other,))
+        return self
+
+    def __add__(self, other):
+        """Add an environment or distribution to an environment"""
+        new = self.__class__([], platform=None, python=None)
+        for env in self, other:
+            new += env
+        return new
+
+
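+# Illustrative Environment usage (the directory name is hypothetical):
+#
+#     env = Environment(['/opt/plugins'])
+#     for project_name in env:
+#         newest = env[project_name][0]   # __getitem__ is newest-to-oldest
+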
+AvailableDistributions = Environment    # XXX backward compatibility
+
+
+class ExtractionError(RuntimeError):
+    """An error occurred extracting a resource
+
+    The following attributes are available from instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
+    """
+
+
+
+
+class ResourceManager:
+    """Manage resource extraction and packages"""
+    extraction_path = None
+
+    def __init__(self):
+        self.cached_files = {}
+
+    def resource_exists(self, package_or_requirement, resource_name):
+        """Does the named resource exist?"""
+        return get_provider(package_or_requirement).has_resource(resource_name)
+
+    def resource_isdir(self, package_or_requirement, resource_name):
+        """Is the named resource an existing directory?"""
+        return get_provider(package_or_requirement).resource_isdir(
+            resource_name
+        )
+
+    def resource_filename(self, package_or_requirement, resource_name):
+        """Return a true filesystem path for specified resource"""
+        return get_provider(package_or_requirement).get_resource_filename(
+            self, resource_name
+        )
+
+    def resource_stream(self, package_or_requirement, resource_name):
+        """Return a readable file-like object for specified resource"""
+        return get_provider(package_or_requirement).get_resource_stream(
+            self, resource_name
+        )
+
+    def resource_string(self, package_or_requirement, resource_name):
+        """Return specified resource as a string"""
+        return get_provider(package_or_requirement).get_resource_string(
+            self, resource_name
+        )
+
+    def resource_listdir(self, package_or_requirement, resource_name):
+        """List the contents of the named resource directory"""
+        return get_provider(package_or_requirement).resource_listdir(
+            resource_name
+        )
+
+    def extraction_error(self):
+        """Give an error message for problems extracting file(s)"""
+
+        old_exc = sys.exc_info()[1]
+        cache_path = self.extraction_path or get_default_cache()
+
+        err = ExtractionError("""Can't extract file(s) to egg cache
+
+The following error occurred while trying to extract file(s) to the Python egg
+cache:
+
+  %s
+
+The Python egg cache directory is currently set to:
+
+  %s
+
+Perhaps your account does not have write access to this directory?  You can
+change the cache directory by setting the PYTHON_EGG_CACHE environment
+variable to point to an accessible directory.
+"""         % (old_exc, cache_path)
+        )
+        err.manager        = self
+        err.cache_path     = cache_path
+        err.original_error = old_exc
+        raise err
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def get_cache_path(self, archive_name, names=()):
+        """Return absolute location in cache for `archive_name` and `names`
+
+        The parent directory of the resulting path will be created if it does
+        not already exist.  `archive_name` should be the base filename of the
+        enclosing egg (which may not be the name of the enclosing zipfile!),
+        including its ".egg" extension.  `names`, if provided, should be a
+        sequence of path name parts "under" the egg's extraction location.
+
+        This method should only be called by resource providers that need to
+        obtain an extraction location, and only for names they intend to
+        extract, as it tracks the generated names for possible cleanup later.
+        """
+        extract_path = self.extraction_path or get_default_cache()
+        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
+        try:
+            _bypass_ensure_directory(target_path)
+        except:
+            self.extraction_error()
+
+        self.cached_files[target_path] = 1
+        return target_path
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def postprocess(self, tempname, filename):
+        """Perform any platform-specific postprocessing of `tempname`
+
+        This is where Mac header rewrites should be done; other platforms don't
+        have anything special they should do.
+
+        Resource providers should call this method ONLY after successfully
+        extracting a compressed resource.  They must NOT call it on resources
+        that are already in the filesystem.
+
+        `tempname` is the current (temporary) name of the file, and `filename`
+        is the name it will be renamed to by the caller after this routine
+        returns.
+        """
+
+        if os.name == 'posix':
+            # Make the resource executable
+            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
+            os.chmod(tempname, mode)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def set_extraction_path(self, path):
+        """Set the base path where resources will be extracted to, if needed.
+
+        If you do not call this routine before any extractions take place, the
+        path defaults to the return value of ``get_default_cache()``.  (Which
+        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+        platform-specific fallbacks.  See that routine's documentation for more
+        details.)
+
+        Resources are extracted to subdirectories of this path based upon
+        information given by the ``IResourceProvider``.  You may set this to a
+        temporary directory, but then you must call ``cleanup_resources()`` to
+        delete the extracted files when done.  There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError(
+                "Can't change extraction path, files already extracted"
+            )
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process.  This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+
+
+def get_default_cache():
+    """Determine the default cache location
+
+    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
+    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
+    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
+    """
+    try:
+        return os.environ['PYTHON_EGG_CACHE']
+    except KeyError:
+        pass
+
+    if os.name!='nt':
+        return os.path.expanduser('~/.python-eggs')
+
+    app_data = 'Application Data'   # XXX this may be locale-specific!
+    app_homes = [
+        (('APPDATA',), None),       # best option, should be locale-safe
+        (('USERPROFILE',), app_data),
+        (('HOMEDRIVE','HOMEPATH'), app_data),
+        (('HOMEPATH',), app_data),
+        (('HOME',), None),
+        (('WINDIR',), app_data),    # 95/98/ME
+    ]
+
+    for keys, subdir in app_homes:
+        dirname = ''
+        for key in keys:
+            if key in os.environ:
+                dirname = os.path.join(dirname, os.environ[key])
+            else:
+                break
+        else:
+            if subdir:
+                dirname = os.path.join(dirname,subdir)
+            return os.path.join(dirname, 'Python-Eggs')
+    else:
+        raise RuntimeError(
+            "Please set the PYTHON_EGG_CACHE enviroment variable"
+        )
+
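+# Illustrative override (the path is hypothetical): PYTHON_EGG_CACHE is
+# consulted on every call, so setting it before the first extraction wins:
+#
+#     os.environ['PYTHON_EGG_CACHE'] = '/tmp/egg-cache'
+#     assert get_default_cache() == '/tmp/egg-cache'
+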
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and all other non-alphanumeric characters become
+    dashes, with runs of multiple dashes condensed to a single dash.
+    """
+    version = version.replace(' ','.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-','_')
+
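+# Examples of the name helpers above (illustrative):
+#
+#     safe_name('my.package_name!')    # -> 'my.package-name-'
+#     safe_version('1.0 beta 2')       # -> '1.0.beta.2'
+#     safe_extra('Extra Feature')      # -> 'extra_feature'
+#     to_filename('my-dist')           # -> 'my_dist'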
+
+
+
+
+
+
+
+class NullProvider:
+    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+    egg_name = None
+    egg_info = None
+    loader = None
+
+    def __init__(self, module):
+        self.loader = getattr(module, '__loader__', None)
+        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+    def get_resource_filename(self, manager, resource_name):
+        return self._fn(self.module_path, resource_name)
+
+    def get_resource_stream(self, manager, resource_name):
+        return StringIO(self.get_resource_string(manager, resource_name))
+
+    def get_resource_string(self, manager, resource_name):
+        return self._get(self._fn(self.module_path, resource_name))
+
+    def has_resource(self, resource_name):
+        return self._has(self._fn(self.module_path, resource_name))
+
+    def has_metadata(self, name):
+        return self.egg_info and self._has(self._fn(self.egg_info,name))
+
+    if sys.version_info <= (3,):
+        def get_metadata(self, name):
+            if not self.egg_info:
+                return ""
+            return self._get(self._fn(self.egg_info,name))
+    else:
+        def get_metadata(self, name):
+            if not self.egg_info:
+                return ""
+            return self._get(self._fn(self.egg_info,name)).decode("utf-8")
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+    def resource_isdir(self,resource_name):
+        return self._isdir(self._fn(self.module_path, resource_name))
+
+    def metadata_isdir(self,name):
+        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
+
+
+    def resource_listdir(self,resource_name):
+        return self._listdir(self._fn(self.module_path,resource_name))
+
+    def metadata_listdir(self,name):
+        if self.egg_info:
+            return self._listdir(self._fn(self.egg_info,name))
+        return []
+
+    def run_script(self,script_name,namespace):
+        script = 'scripts/'+script_name
+        if not self.has_metadata(script):
+            raise ResolutionError("No script named %r" % script_name)
+        script_text = self.get_metadata(script).replace('\r\n','\n')
+        script_text = script_text.replace('\r','\n')
+        script_filename = self._fn(self.egg_info,script)
+        namespace['__file__'] = script_filename
+        if os.path.exists(script_filename):
+            execfile(script_filename, namespace, namespace)
+        else:
+            from linecache import cache
+            cache[script_filename] = (
+                len(script_text), 0, script_text.split('\n'), script_filename
+            )
+            script_code = compile(script_text,script_filename,'exec')
+            exec script_code in namespace, namespace
+
+    def _has(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _isdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _listdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _fn(self, base, resource_name):
+        if resource_name:
+            return os.path.join(base, *resource_name.split('/'))
+        return base
+
+    def _get(self, path):
+        if hasattr(self.loader, 'get_data'):
+            return self.loader.get_data(path)
+        raise NotImplementedError(
+            "Can't perform this operation for loaders without 'get_data()'"
+        )
+
+register_loader_type(object, NullProvider)
+
+
+class EggProvider(NullProvider):
+    """Provider based on a virtual filesystem"""
+
+    def __init__(self,module):
+        NullProvider.__init__(self,module)
+        self._setup_prefix()
+
+    def _setup_prefix(self):
+        # we assume here that our metadata may be nested inside a "basket"
+        # of multiple eggs; that's why we use module_path instead of .archive
+        path = self.module_path
+        old = None
+        while path!=old:
+            if path.lower().endswith('.egg'):
+                self.egg_name = os.path.basename(path)
+                self.egg_info = os.path.join(path, 'EGG-INFO')
+                self.egg_root = path
+                break
+            old = path
+            path, base = os.path.split(path)
+
+
+
+
+
+
+class DefaultProvider(EggProvider):
+    """Provides access to package resources in the filesystem"""
+
+    def _has(self, path):
+        return os.path.exists(path)
+
+    def _isdir(self,path):
+        return os.path.isdir(path)
+
+    def _listdir(self,path):
+        return os.listdir(path)
+
+    def get_resource_stream(self, manager, resource_name):
+        return open(self._fn(self.module_path, resource_name), 'rb')
+
+    def _get(self, path):
+        stream = open(path, 'rb')
+        try:
+            return stream.read()
+        finally:
+            stream.close()
+
+register_loader_type(type(None), DefaultProvider)
+
+if importlib_bootstrap is not None:
+    register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider)
+
+
+class EmptyProvider(NullProvider):
+    """Provider that returns nothing for all requests"""
+
+    _isdir = _has = lambda self,path: False
+    _get          = lambda self,path: ''
+    _listdir      = lambda self,path: []
+    module_path   = None
+
+    def __init__(self):
+        pass
+
+empty_provider = EmptyProvider()
+
+
+
+
+class ZipProvider(EggProvider):
+    """Resource support for zips and eggs"""
+
+    eagers = None
+
+    def __init__(self, module):
+        EggProvider.__init__(self,module)
+        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
+        self.zip_pre = self.loader.archive+os.sep
+
+    def _zipinfo_name(self, fspath):
+        # Convert a virtual filename (full path to file) into a zipfile subpath
+        # usable with the zipimport directory cache for our target archive
+        if fspath.startswith(self.zip_pre):
+            return fspath[len(self.zip_pre):]
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath,self.zip_pre)
+        )
+
+    def _parts(self,zip_path):
+        # Convert a zipfile subpath into an egg-relative path part list
+        fspath = self.zip_pre+zip_path  # pseudo-fs path
+        if fspath.startswith(self.egg_root+os.sep):
+            return fspath[len(self.egg_root)+1:].split(os.sep)
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath,self.egg_root)
+        )
+
+    def get_resource_filename(self, manager, resource_name):
+        if not self.egg_name:
+            raise NotImplementedError(
+                "resource_filename() only supported for .egg, not .zip"
+            )
+        # no need to lock for extraction, since we use temp names
+        zip_path = self._resource_to_zip(resource_name)
+        eagers = self._get_eager_resources()
+        if '/'.join(self._parts(zip_path)) in eagers:
+            for name in eagers:
+                self._extract_resource(manager, self._eager_to_zip(name))
+        return self._extract_resource(manager, zip_path)
+
+    def _extract_resource(self, manager, zip_path):
+
+        if zip_path in self._index():
+            for name in self._index()[zip_path]:
+                last = self._extract_resource(
+                    manager, os.path.join(zip_path, name)
+                )
+            return os.path.dirname(last)  # return the extracted directory name
+
+        zip_stat = self.zipinfo[zip_path]
+        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
+        date_time = (
+            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
+            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
+        )
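+        # (zip_stat holds the DOS-packed date/time words cached by zipimport;
+        # the shifts above unpack year/month/day and hour/minute/second into
+        # a tuple acceptable to time.mktime.)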
+        timestamp = time.mktime(date_time)
+
+        try:
+            if not WRITE_SUPPORT:
+                raise IOError('"os.rename" and "os.unlink" are not supported '
+                              'on this platform')
+
+            real_path = manager.get_cache_path(
+                self.egg_name, self._parts(zip_path)
+            )
+
+            if os.path.isfile(real_path):
+                stat = os.stat(real_path)
+                if stat.st_size==size and stat.st_mtime==timestamp:
+                    # size and stamp match, don't bother extracting
+                    return real_path
+
+            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
+            os.write(outf, self.loader.get_data(zip_path))
+            os.close(outf)
+            utime(tmpnam, (timestamp,timestamp))
+            manager.postprocess(tmpnam, real_path)
+
+            try:
+                rename(tmpnam, real_path)
+
+            except os.error:
+                if os.path.isfile(real_path):
+                    stat = os.stat(real_path)
+
+                    if stat.st_size==size and stat.st_mtime==timestamp:
+                        # size and stamp match, somebody did it just ahead of
+                        # us, so we're done
+                        return real_path
+                    elif os.name=='nt':     # Windows, del old file and retry
+                        unlink(real_path)
+                        rename(tmpnam, real_path)
+                        return real_path
+                raise
+
+        except os.error:
+            manager.extraction_error()  # report a user-friendly error
+
+        return real_path
+
+    def _get_eager_resources(self):
+        if self.eagers is None:
+            eagers = []
+            for name in ('native_libs.txt', 'eager_resources.txt'):
+                if self.has_metadata(name):
+                    eagers.extend(self.get_metadata_lines(name))
+            self.eagers = eagers
+        return self.eagers
+
+    def _index(self):
+        try:
+            return self._dirindex
+        except AttributeError:
+            ind = {}
+            for path in self.zipinfo:
+                parts = path.split(os.sep)
+                while parts:
+                    parent = os.sep.join(parts[:-1])
+                    if parent in ind:
+                        ind[parent].append(parts[-1])
+                        break
+                    else:
+                        ind[parent] = [parts.pop()]
+            self._dirindex = ind
+            return ind
+
+    def _has(self, fspath):
+        zip_path = self._zipinfo_name(fspath)
+        return zip_path in self.zipinfo or zip_path in self._index()
+
+    def _isdir(self,fspath):
+        return self._zipinfo_name(fspath) in self._index()
+
+    def _listdir(self,fspath):
+        return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+    def _eager_to_zip(self,resource_name):
+        return self._zipinfo_name(self._fn(self.egg_root,resource_name))
+
+    def _resource_to_zip(self,resource_name):
+        return self._zipinfo_name(self._fn(self.module_path,resource_name))
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class FileMetadata(EmptyProvider):
+    """Metadata handler for standalone PKG-INFO files
+
+    Usage::
+
+        metadata = FileMetadata("/path/to/PKG-INFO")
+
+    This provider rejects all data and metadata requests except for PKG-INFO,
+    which is treated as existing and whose contents are those of the file at
+    the provided location.
+    """
+
+    def __init__(self,path):
+        self.path = path
+
+    def has_metadata(self,name):
+        return name=='PKG-INFO'
+
+    def get_metadata(self,name):
+        if name=='PKG-INFO':
+            f = open(self.path,'rU')
+            metadata = f.read()
+            f.close()
+            return metadata
+        raise KeyError("No metadata except PKG-INFO is available")
+
+    def get_metadata_lines(self,name):
+        return yield_lines(self.get_metadata(name))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
+        self.zip_pre = importer.archive+os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+
+class ImpWrapper:
+    """PEP 302 Importer that wraps Python's "normal" import algorithm"""
+
+    def __init__(self, path=None):
+        self.path = path
+
+    def find_module(self, fullname, path=None):
+        subname = fullname.split(".")[-1]
+        if subname != fullname and self.path is None:
+            return None
+        if self.path is None:
+            path = None
+        else:
+            path = [self.path]
+        try:
+            file, filename, etc = imp.find_module(subname, path)
+        except ImportError:
+            return None
+        return ImpLoader(file, filename, etc)
+
+
+class ImpLoader:
+    """PEP 302 Loader that wraps Python's "normal" import algorithm"""
+
+    def __init__(self, file, filename, etc):
+        self.file = file
+        self.filename = filename
+        self.etc = etc
+
+    def load_module(self, fullname):
+        try:
+            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
+        finally:
+            if self.file: self.file.close()
+        # Note: we don't set __loader__ because we want the module to look
+        # normal; i.e. this is just a wrapper for standard import machinery
+        return mod
+
+
+
+
+def get_importer(path_item):
+    """Retrieve a PEP 302 "importer" for the given path item
+
+    If there is no importer, this returns a wrapper around the builtin import
+    machinery.  The returned importer is only cached if it was created by a
+    path hook.
+    """
+    try:
+        importer = sys.path_importer_cache[path_item]
+    except KeyError:
+        for hook in sys.path_hooks:
+            try:
+                importer = hook(path_item)
+            except ImportError:
+                pass
+            else:
+                break
+        else:
+            importer = None
+
+    sys.path_importer_cache.setdefault(path_item,importer)
+    if importer is None:
+        try:
+            importer = ImpWrapper(path_item)
+        except ImportError:
+            pass
+    return importer
+
+try:
+    from pkgutil import get_importer, ImpImporter
+except ImportError:
+    pass    # Python 2.3 or 2.4, use our own implementation
+else:
+    ImpWrapper = ImpImporter    # Python 2.5, use pkgutil's implementation
+    del ImpLoader, ImpImporter
+
+
+
+
+
+
+_declare_state('dict', _distribution_finders = {})
+
+def register_finder(importer_type, distribution_finder):
+    """Register `distribution_finder` to find distributions in sys.path items
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `distribution_finder` is a callable that, passed a path
+    item and the importer instance, yields ``Distribution`` instances found on
+    that path item.  See ``pkg_resources.find_on_path`` for an example."""
+    _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+    """Yield distributions accessible via `path_item`"""
+    importer = get_importer(path_item)
+    finder = _find_adapter(_distribution_finders, importer)
+    return finder(importer, path_item, only)
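+
+# Editor's illustration (not part of distribute): a minimal sketch of how the
+# finder machinery above is typically consumed; the site-packages path used
+# here is hypothetical.
+def _example_find_distributions():
+    """Return the project names of distributions found on one path entry."""
+    return [dist.project_name
+            for dist in find_distributions('/usr/lib/python2.7/site-packages')]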
+
+def find_in_zip(importer, path_item, only=False):
+    metadata = EggMetadata(importer)
+    if metadata.has_metadata('PKG-INFO'):
+        yield Distribution.from_filename(path_item, metadata=metadata)
+    if only:
+        return  # don't yield nested distros
+    for subitem in metadata.resource_listdir('/'):
+        if subitem.endswith('.egg'):
+            subpath = os.path.join(path_item, subitem)
+            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
+                yield dist
+
+register_finder(zipimport.zipimporter, find_in_zip)
+
+def StringIO(*args, **kw):
+    """Thunk to load the real StringIO on demand"""
+    global StringIO
+    try:
+        from cStringIO import StringIO
+    except ImportError:
+        from StringIO import StringIO
+    return StringIO(*args,**kw)
+
+def find_nothing(importer, path_item, only=False):
+    return ()
+register_finder(object,find_nothing)
+
+def find_on_path(importer, path_item, only=False):
+    """Yield distributions accessible on a sys.path directory"""
+    path_item = _normalize_cached(path_item)
+
+    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
+        if path_item.lower().endswith('.egg'):
+            # unpacked egg
+            yield Distribution.from_filename(
+                path_item, metadata=PathMetadata(
+                    path_item, os.path.join(path_item,'EGG-INFO')
+                )
+            )
+        else:
+            # scan for .egg and .egg-info in directory
+            for entry in os.listdir(path_item):
+                lower = entry.lower()
+                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
+                    fullpath = os.path.join(path_item, entry)
+                    if os.path.isdir(fullpath):
+                        # egg-info directory, allow getting metadata
+                        metadata = PathMetadata(path_item, fullpath)
+                    else:
+                        metadata = FileMetadata(fullpath)
+                    yield Distribution.from_location(
+                        path_item,entry,metadata,precedence=DEVELOP_DIST
+                    )
+                elif not only and lower.endswith('.egg'):
+                    for dist in find_distributions(os.path.join(path_item, entry)):
+                        yield dist
+                elif not only and lower.endswith('.egg-link'):
+                    entry_file = open(os.path.join(path_item, entry))
+                    try:
+                        entry_lines = entry_file.readlines()
+                    finally:
+                        entry_file.close()
+                    for line in entry_lines:
+                        if not line.strip(): continue
+                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
+                            yield item
+                        break
+register_finder(ImpWrapper,find_on_path)
+
+if importlib_bootstrap is not None:
+    register_finder(importlib_bootstrap.FileFinder, find_on_path)
+
+_declare_state('dict', _namespace_handlers={})
+_declare_state('dict', _namespace_packages={})
+
+
+def register_namespace_handler(importer_type, namespace_handler):
+    """Register `namespace_handler` to declare namespace packages
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `namespace_handler` is a callable like this::
+
+        def namespace_handler(importer,path_entry,moduleName,module):
+            # return a path_entry to use for child packages
+
+    Namespace handlers are only called if the importer object has already
+    agreed that it can handle the relevant path item, and they should only
+    return a subpath if the module __path__ does not already contain an
+    equivalent subpath.  For an example namespace handler, see
+    ``pkg_resources.file_ns_handler``.
+    """
+    _namespace_handlers[importer_type] = namespace_handler
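+
+# Editor's illustration (not part of distribute): the smallest handler that
+# satisfies the contract documented above.  It is only defined, never
+# registered; compare file_ns_handler below for a real implementation.
+def _example_ns_handler(importer, path_entry, moduleName, module):
+    # Contribute no subpath; a real handler would be hooked in via
+    # register_namespace_handler(SomeImporterType, _example_ns_handler).
+    return None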
+
+def _handle_ns(packageName, path_item):
+    """Ensure that named package includes a subpath of path_item (if needed)"""
+    importer = get_importer(path_item)
+    if importer is None:
+        return None
+    loader = importer.find_module(packageName)
+    if loader is None:
+        return None
+    module = sys.modules.get(packageName)
+    if module is None:
+        module = sys.modules[packageName] = types.ModuleType(packageName)
+        module.__path__ = []; _set_parent_ns(packageName)
+    elif not hasattr(module,'__path__'):
+        raise TypeError("Not a package:", packageName)
+    handler = _find_adapter(_namespace_handlers, importer)
+    subpath = handler(importer,path_item,packageName,module)
+    if subpath is not None:
+        path = module.__path__; path.append(subpath)
+        loader.load_module(packageName); module.__path__ = path
+    return subpath
+
+def declare_namespace(packageName):
+    """Declare that package 'packageName' is a namespace package"""
+
+    imp.acquire_lock()
+    try:
+        if packageName in _namespace_packages:
+            return
+
+        path, parent = sys.path, None
+        if '.' in packageName:
+            parent = '.'.join(packageName.split('.')[:-1])
+            declare_namespace(parent)
+            if parent not in _namespace_packages:
+                __import__(parent)
+            try:
+                path = sys.modules[parent].__path__
+            except AttributeError:
+                raise TypeError("Not a package:", parent)
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent,[]).append(packageName)
+        _namespace_packages.setdefault(packageName,[])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        imp.release_lock()
+
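+# Editor's illustration (not part of distribute): a namespace package normally
+# calls declare_namespace() from its own __init__.py, e.g. (package name
+# purely hypothetical)::
+#
+#     # examplens/__init__.py
+#     import pkg_resources
+#     pkg_resources.declare_namespace(__name__)
+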
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent,()):
+            subpath = _handle_ns(package, path_item)
+            if subpath: fixup_namespace_packages(subpath,package)
+    finally:
+        imp.release_lock()
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item)==normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+register_namespace_handler(ImpWrapper,file_ns_handler)
+register_namespace_handler(zipimport.zipimporter,file_ns_handler)
+
+if importlib_bootstrap is not None:
+    register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+register_namespace_handler(object,null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(os.path.realpath(filename))
+
+def _normalize_cached(filename,_cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
+    if isinstance(strs,basestring):
+        for s in strs.splitlines():
+            s = s.strip()
+            if s and not s.startswith('#'):     # skip blank lines/comments
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
+
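+
+# Editor's illustration (not part of distribute): yield_lines skips blank and
+# comment lines and flattens nested sequences of strings.
+def _example_yield_lines():
+    """Return True if yield_lines behaves as described above."""
+    flat = list(yield_lines("foo\n# a comment\n\nbar"))
+    nested = list(yield_lines(["foo", ["bar\nbaz"]]))
+    return flat == ['foo', 'bar'] and nested == ['foo', 'bar', 'baz']
+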
+LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
+CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
+DISTRO   = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
+VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
+COMMA    = re.compile(r"\s*,").match               # comma between items
+OBRACKET = re.compile(r"\s*\[").match
+CBRACKET = re.compile(r"\s*\]").match
+MODULE   = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"(?P<name>[^-]+)"
+    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
+    re.VERBOSE | re.IGNORECASE
+).match
+
+component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
+replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
+
+def _parse_version_parts(s):
+    for part in component_re.split(s):
+        part = replace(part,part)
+        if not part or part=='.':
+            continue
+        if part[:1] in '0123456789':
+            yield part.zfill(8)    # pad for numeric comparison
+        else:
+            yield '*'+part
+
+    yield '*final'  # ensure that alpha/beta/candidate are before final
+
+def parse_version(s):
+    """Convert a version string to a chronologically-sortable key
+
+    This is a rough cross between distutils' StrictVersion and LooseVersion;
+    if you give it versions that would work with StrictVersion, then it behaves
+    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
+    *possible* to create pathological version coding schemes that will fool
+    this parser, but they should be very rare in practice.
+
+    The returned value will be a tuple of strings.  Numeric portions of the
+    version are padded to 8 digits so they will compare numerically, but
+    without relying on how numbers compare relative to strings.  Dots are
+    dropped, but dashes are retained.  Trailing zeros between alpha segments
+    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
+    "2.4". Alphanumeric parts are lower-cased.
+
+    The algorithm assumes that strings like "-" and any alpha string that
+    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
+    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
+    considered newer than "2.4-1", which in turn is newer than "2.4".
+
+    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
+    come before "final" alphabetically) are assumed to be pre-release versions,
+    so that the version "2.4" is considered newer than "2.4a1".
+
+    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
+    "rc" are treated as if they were "c", i.e. as though they were release
+    candidates, and therefore are not as new as a version string that does not
+    contain them, and "dev" is replaced with an '@' so that it sorts lower
+    than any other pre-release tag.
+    """
+    parts = []
+    for part in _parse_version_parts(s.lower()):
+        if part.startswith('*'):
+            if part<'*final':   # remove '-' before a prerelease tag
+                while parts and parts[-1]=='*final-': parts.pop()
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1]=='00000000':
+                parts.pop()
+        parts.append(part)
+    return tuple(parts)
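+
+# Editor's illustration (not part of distribute): the orderings promised in
+# the docstring above, checked with parse_version itself.
+def _example_parse_version_ordering():
+    """Return True if the documented orderings hold."""
+    keys = [
+        parse_version('2.4a1'),     # pre-release sorts before the final
+        parse_version('2.4'),
+        parse_version('2.4-1'),     # "-1" is a patch level on 2.4
+        parse_version('2.4.1'),     # newer than the "2.4-1" patch level
+    ]
+    return keys == sorted(keys)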
+
+class EntryPoint(object):
+    """Object representing an advertised importable object"""
+
+    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+        if not MODULE(module_name):
+            raise ValueError("Invalid module name", module_name)
+        self.name = name
+        self.module_name = module_name
+        self.attrs = tuple(attrs)
+        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
+        self.dist = dist
+
+    def __str__(self):
+        s = "%s = %s" % (self.name, self.module_name)
+        if self.attrs:
+            s += ':' + '.'.join(self.attrs)
+        if self.extras:
+            s += ' [%s]' % ','.join(self.extras)
+        return s
+
+    def __repr__(self):
+        return "EntryPoint.parse(%r)" % str(self)
+
+    def load(self, require=True, env=None, installer=None):
+        if require: self.require(env, installer)
+        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
+        for attr in self.attrs:
+            try:
+                entry = getattr(entry,attr)
+            except AttributeError:
+                raise ImportError("%r has no %r attribute" % (entry,attr))
+        return entry
+
+    def require(self, env=None, installer=None):
+        if self.extras and not self.dist:
+            raise UnknownExtra("Can't require() without a distribution", self)
+        map(working_set.add,
+            working_set.resolve(self.dist.requires(self.extras),env,installer))
+
+
+
+    #@classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1,extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional.
+        """
+        try:
+            attrs = extras = ()
+            name,value = src.split('=',1)
+            if '[' in value:
+                value,extras = value.split('[',1)
+                req = Requirement.parse("x["+extras)
+                if req.specs: raise ValueError
+                extras = req.extras
+            if ':' in value:
+                value,attrs = value.split(':',1)
+                if not MODULE(attrs.rstrip()):
+                    raise ValueError
+                attrs = attrs.rstrip().split('.')
+        except ValueError:
+            raise ValueError(
+                "EntryPoint must be in 'name=module:attrs [extras]' format",
+                src
+            )
+        else:
+            return cls(name.strip(), value.strip(), attrs, extras, dist)
+
+    parse = classmethod(parse)
+
+
+
+
+
+
+
+
+    #@classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name]=ep
+        return this
+
+    parse_group = classmethod(parse_group)
+
+    #@classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data,dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
+
+    parse_map = classmethod(parse_map)
+
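+
+# Editor's illustration (not part of distribute): parsing the entry point
+# syntax documented in EntryPoint.parse().  All names are hypothetical.
+def _example_entry_point_parse():
+    """Return the parsed pieces of a sample entry point specification."""
+    ep = EntryPoint.parse("foo = example.module:SomeClass.factory [bar,baz]")
+    # ep.name == 'foo'; ep.module_name == 'example.module'
+    # ep.attrs == ('SomeClass', 'factory'); ep.extras == ('bar', 'baz')
+    return ep.name, ep.module_name, ep.attrs, ep.extras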
+
+def _remove_md5_fragment(location):
+    if not location:
+        return ''
+    parsed = urlparse(location)
+    if parsed[-1].startswith('md5='):
+        return urlunparse(parsed[:-1] + ('',))
+    return location
+
+
+class Distribution(object):
+    """Wrap an actual or potential sys.path entry w/metadata"""
+    PKG_INFO = 'PKG-INFO'
+
+    def __init__(self,
+        location=None, metadata=None, project_name=None, version=None,
+        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
+    ):
+        self.project_name = safe_name(project_name or 'Unknown')
+        if version is not None:
+            self._version = safe_version(version)
+        self.py_version = py_version
+        self.platform = platform
+        self.location = location
+        self.precedence = precedence
+        self._provider = metadata or empty_provider
+
+    #@classmethod
+    def from_location(cls,location,basename,metadata=None,**kw):
+        project_name, version, py_version, platform = [None]*4
+        basename, ext = os.path.splitext(basename)
+        if ext.lower() in _distributionImpl:
+            # .dist-info distributions store much of their metadata differently
+            match = EGG_NAME(basename)
+            if match:
+                project_name, version, py_version, platform = match.group(
+                    'name','ver','pyver','plat'
+                )
+            cls = _distributionImpl[ext.lower()]
+        return cls(
+            location, metadata, project_name=project_name, version=version,
+            py_version=py_version, platform=platform, **kw
+        )
+    from_location = classmethod(from_location)
+
+
+    hashcmp = property(
+        lambda self: (
+            getattr(self,'parsed_version',()),
+            self.precedence,
+            self.key,
+            _remove_md5_fragment(self.location),
+            self.py_version,
+            self.platform
+        )
+    )
+    def __hash__(self): return hash(self.hashcmp)
+    def __lt__(self, other):
+        return self.hashcmp < other.hashcmp
+    def __le__(self, other):
+        return self.hashcmp <= other.hashcmp
+    def __gt__(self, other):
+        return self.hashcmp > other.hashcmp
+    def __ge__(self, other):
+        return self.hashcmp >= other.hashcmp
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            # It's not a Distribution, so they are not equal
+            return False
+        return self.hashcmp == other.hashcmp
+    def __ne__(self, other):
+        return not self == other
+
+    # These properties have to be lazy so that we don't have to load any
+    # metadata until/unless it's actually needed.  (i.e., some distributions
+    # may not know their name or version without loading PKG-INFO)
+
+    #@property
+    def key(self):
+        try:
+            return self._key
+        except AttributeError:
+            self._key = key = self.project_name.lower()
+            return key
+    key = property(key)
+
+    #@property
+    def parsed_version(self):
+        try:
+            return self._parsed_version
+        except AttributeError:
+            self._parsed_version = pv = parse_version(self.version)
+            return pv
+
+    parsed_version = property(parsed_version)
+
+    #@property
+    def version(self):
+        try:
+            return self._version
+        except AttributeError:
+            for line in self._get_metadata(self.PKG_INFO):
+                if line.lower().startswith('version:'):
+                    self._version = safe_version(line.split(':',1)[1].strip())
+                    return self._version
+            else:
+                raise ValueError(
+                    "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self
+                )
+    version = property(version)
+
+
+
+
+    #@property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            dm = self.__dep_map = {None: []}
+            for name in 'requires.txt', 'depends.txt':
+                for extra,reqs in split_sections(self._get_metadata(name)):
+                    if extra: extra = safe_extra(extra)
+                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
+            return dm
+    _dep_map = property(_dep_map)
+
+    def requires(self,extras=()):
+        """List of Requirements needed for this distro if `extras` are used"""
+        dm = self._dep_map
+        deps = []
+        deps.extend(dm.get(None,()))
+        for ext in extras:
+            try:
+                deps.extend(dm[safe_extra(ext)])
+            except KeyError:
+                raise UnknownExtra(
+                    "%s has no such extra feature %r" % (self, ext)
+                )
+        return deps
+
+    def _get_metadata(self,name):
+        if self.has_metadata(name):
+            for line in self.get_metadata_lines(name):
+                yield line
+
+    def activate(self,path=None):
+        """Ensure distribution is importable on `path` (default=sys.path)"""
+        if path is None: path = sys.path
+        self.insert_on(path)
+        if path is sys.path:
+            fixup_namespace_packages(self.location)
+            map(declare_namespace, self._get_metadata('namespace_packages.txt'))
+
+
+    def egg_name(self):
+        """Return what this distribution's standard .egg filename should be"""
+        filename = "%s-%s-py%s" % (
+            to_filename(self.project_name), to_filename(self.version),
+            self.py_version or PY_MAJOR
+        )
+
+        if self.platform:
+            filename += '-'+self.platform
+        return filename
+
+    def __repr__(self):
+        if self.location:
+            return "%s (%s)" % (self,self.location)
+        else:
+            return str(self)
+
+    def __str__(self):
+        try: version = getattr(self,'version',None)
+        except ValueError: version = None
+        version = version or "[unknown version]"
+        return "%s %s" % (self.project_name,version)
+
+    def __getattr__(self,attr):
+        """Delegate all unrecognized public attributes to .metadata provider"""
+        if attr.startswith('_'):
+            raise AttributeError,attr
+        return getattr(self._provider, attr)
+
+    #@classmethod
+    def from_filename(cls,filename,metadata=None, **kw):
+        return cls.from_location(
+            _normalize_cached(filename), os.path.basename(filename), metadata,
+            **kw
+        )
+    from_filename = classmethod(from_filename)
+
+    def as_requirement(self):
+        """Return a ``Requirement`` that matches this distribution exactly"""
+        return Requirement.parse('%s==%s' % (self.project_name, self.version))
+
+    def load_entry_point(self, group, name):
+        """Return the `name` entry point of `group` or raise ImportError"""
+        ep = self.get_entry_info(group,name)
+        if ep is None:
+            raise ImportError("Entry point %r not found" % ((group,name),))
+        return ep.load()
+
+    def get_entry_map(self, group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group,{})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def insert_on(self, path, loc = None):
+        """Insert self.location in path before its nearest parent directory"""
+
+        loc = loc or self.location
+
+        if self.project_name == 'setuptools':
+            try:
+                version = self.version
+            except ValueError:
+                version = ''
+            if '0.7' in version:
+                raise ValueError(
+                    "A 0.7-series setuptools cannot be installed "
+                    "with distribute. Found one at %s" % str(self.location))
+
+        if not loc:
+            return
+
+        if path is sys.path:
+            self.check_version_conflict()
+
+        nloc = _normalize_cached(loc)
+        bdir = os.path.dirname(nloc)
+        npath= map(_normalize_cached, path)
+
+        bp = None
+        for p, item in enumerate(npath):
+            if item==nloc:
+                break
+            elif item==bdir and self.precedence==EGG_DIST:
+                # if it's an .egg, give it precedence over its directory
+                path.insert(p, loc)
+                npath.insert(p, nloc)
+                break
+        else:
+            path.append(loc)
+            return
+
+        # p is the spot where we found or inserted loc; now remove duplicates
+        while 1:
+            try:
+                np = npath.index(nloc, p+1)
+            except ValueError:
+                break
+            else:
+                del npath[np], path[np]
+                p = np  # ha!
+
+        return
+
+
+
+    def check_version_conflict(self):
+        if self.key=='distribute':
+            return      # ignore the inevitable setuptools self-conflicts  :(
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (modname not in sys.modules or modname in nsp
+                or modname in _namespace_packages
+            ):
+                continue
+            if modname in ('pkg_resources', 'setuptools', 'site'):
+                continue
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and (normalize_path(fn).startswith(loc) or
+                       fn.startswith(self.location)):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for "+repr(self))
+            return False
+        return True
+
+    def clone(self,**kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        for attr in (
+            'project_name', 'version', 'py_version', 'platform', 'location',
+            'precedence'
+        ):
+            kw.setdefault(attr, getattr(self,attr,None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+
+
+
+    #@property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+    extras = property(extras)
+
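+
+# Editor's illustration (not part of distribute): round-tripping a purely
+# hypothetical egg file name through Distribution.
+def _example_distribution_naming():
+    """Return the name pieces recovered from an egg-style file name."""
+    dist = Distribution.from_filename('FooPkg-1.2-py2.7.egg')
+    # dist.project_name == 'FooPkg'; dist.version == '1.2'
+    # dist.py_version == '2.7'; dist.egg_name() == 'FooPkg-1.2-py2.7'
+    return dist.project_name, dist.version, dist.py_version, dist.egg_name()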
+
+class DistInfoDistribution(Distribution):
+    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
+    PKG_INFO = 'METADATA'
+    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+    @property
+    def _parsed_pkg_info(self):
+        """Parse and cache metadata"""
+        try:
+            return self._pkg_info
+        except AttributeError:
+            from email.parser import Parser
+            self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO))
+            return self._pkg_info
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._compute_dependencies()
+            return self.__dep_map
+
+    def _preparse_requirement(self, requires_dist):
+        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
+        Split off the environment marker, add an '==' prefix to bare version
+        specifiers as necessary, and remove the parentheses.
+        """
+        parts = requires_dist.split(';', 1) + ['']
+        distvers = parts[0].strip()
+        mark = parts[1].strip()
+        distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
+        distvers = distvers.replace('(', '').replace(')', '')
+        return (distvers, mark)
+
+    def _compute_dependencies(self):
+        """Recompute this distribution's dependencies."""
+        from _markerlib import compile as compile_marker
+        dm = self.__dep_map = {None: []}
+
+        reqs = []
+        # Collect every Requires-Dist entry, including those carrying markers
+        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+            distvers, mark = self._preparse_requirement(req)
+            parsed = parse_requirements(distvers).next()
+            parsed.marker_fn = compile_marker(mark)
+            reqs.append(parsed)
+
+        def reqs_for_extra(extra):
+            for req in reqs:
+                if req.marker_fn(override={'extra':extra}):
+                    yield req
+
+        common = frozenset(reqs_for_extra(None))
+        dm[None].extend(common)
+
+        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+            extra = safe_extra(extra.strip())
+            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
+
+        return dm
+
+
+_distributionImpl = {'.egg': Distribution,
+                     '.egg-info': Distribution,
+                     '.dist-info': DistInfoDistribution }
+
+
+def issue_warning(*args,**kw):
+    level = 1
+    g = globals()
+    try:
+        # find the first stack frame that is *not* code in
+        # the pkg_resources module, to use for the warning
+        while sys._getframe(level).f_globals is g:
+            level += 1
+    except ValueError:
+        pass
+    from warnings import warn
+    warn(stacklevel = level+1, *args, **kw)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def parse_requirements(strs):
+    """Yield ``Requirement`` objects for each specification in `strs`
+
+    `strs` must be an instance of ``basestring``, or a (possibly-nested)
+    iterable thereof.
+    """
+    # create a steppable iterator, so we can handle \-continuations
+    lines = iter(yield_lines(strs))
+
+    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
+
+        items = []
+
+        while not TERMINATOR(line,p):
+            if CONTINUE(line,p):
+                try:
+                    line = lines.next(); p = 0
+                except StopIteration:
+                    raise ValueError(
+                        "\\ must not appear on the last nonblank line"
+                    )
+
+            match = ITEM(line,p)
+            if not match:
+                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
+
+            items.append(match.group(*groups))
+            p = match.end()
+
+            match = COMMA(line,p)
+            if match:
+                p = match.end() # skip the comma
+            elif not TERMINATOR(line,p):
+                raise ValueError(
+                    "Expected ',' or end-of-list in",line,"at",line[p:]
+                )
+
+        match = TERMINATOR(line,p)
+        if match: p = match.end()   # skip the terminator, if any
+        return line, p, items
+
+    for line in lines:
+        match = DISTRO(line)
+        if not match:
+            raise ValueError("Missing distribution spec", line)
+        project_name = match.group(1)
+        p = match.end()
+        extras = []
+
+        match = OBRACKET(line,p)
+        if match:
+            p = match.end()
+            line, p, extras = scan_list(
+                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
+            )
+
+        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
+        specs = [(op,safe_version(val)) for op,val in specs]
+        yield Requirement(project_name, specs, extras)
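+
+# Editor's illustration (not part of distribute): parsing a small, purely
+# hypothetical requirement specification with the function above.
+def _example_parse_requirements():
+    """Return (project_name, specs, extras) for each parsed requirement."""
+    reqs = parse_requirements("FooPkg>=1.2,<2dev\nBarPkg[quux]")
+    return [(r.project_name, r.specs, r.extras) for r in reqs]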
+
+
+def _sort_dists(dists):
+    tmp = [(dist.hashcmp,dist) for dist in dists]
+    tmp.sort()
+    dists[::-1] = [d for hc,d in tmp]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class Requirement:
+    def __init__(self, project_name, specs, extras):
+        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+        self.unsafe_name, project_name = project_name, safe_name(project_name)
+        self.project_name, self.key = project_name, project_name.lower()
+        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
+        index.sort()
+        self.specs = [(op,ver) for parsed,trans,op,ver in index]
+        self.index, self.extras = index, tuple(map(safe_extra,extras))
+        self.hashCmp = (
+            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
+            frozenset(self.extras)
+        )
+        self.__hash = hash(self.hashCmp)
+
+    def __str__(self):
+        specs = ','.join([''.join(s) for s in self.specs])
+        extras = ','.join(self.extras)
+        if extras: extras = '[%s]' % extras
+        return '%s%s%s' % (self.project_name, extras, specs)
+
+    def __eq__(self,other):
+        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
+
+    def __contains__(self,item):
+        if isinstance(item,Distribution):
+            if item.key <> self.key: return False
+            if self.index: item = item.parsed_version  # only get if we need it
+        elif isinstance(item,basestring):
+            item = parse_version(item)
+        last = None
+        compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
+        for parsed,trans,op,ver in self.index:
+            action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
+            if action=='F':     return False
+            elif action=='T':   return True
+            elif action=='+':   last = True
+            elif action=='-' or last is None:   last = False
+        if last is None: last = True    # no rules encountered
+        return last
+
+
+    def __hash__(self):
+        return self.__hash
+
+    def __repr__(self): return "Requirement.parse(%r)" % str(self)
+
+    #@staticmethod
+    def parse(s, replacement=True):
+        reqs = list(parse_requirements(s))
+        if reqs:
+            if len(reqs) == 1:
+                founded_req = reqs[0]
+                # if a setuptools distribution was asked for, substitute a
+                # requirement on distribute instead
+                if _override_setuptools(founded_req) and replacement:
+                    distribute = list(parse_requirements('distribute'))
+                    if len(distribute) == 1:
+                        return distribute[0]
+                    return founded_req
+                else:
+                    return founded_req
+
+            raise ValueError("Expected only one requirement", s)
+        raise ValueError("No requirements found", s)
+
+    parse = staticmethod(parse)
+
+state_machine = {
+    # Each action string is indexed by the result of comparing the candidate
+    # version with the spec version (0: equal, 1: greater, -1: less; see
+    # Requirement.__contains__): 'T'/'F' answer immediately, '+'/'-' record a
+    # provisional True/False and keep scanning.
+    #       =><
+    '<' :  '--T',
+    '<=':  'T-T',
+    '>' :  'F+F',
+    '>=':  'T+F',
+    '==':  'T..',
+    '!=':  'F++',
+}
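+
+# Editor's illustration (not part of distribute): the containment behaviour
+# that the table above drives, using a hypothetical project name.
+def _example_requirement_contains():
+    """Return the containment results for three candidate versions."""
+    req = Requirement.parse('FooPkg>=1.2,!=1.3,<2a')
+    # '1.2.1' in req -> True; '1.3' in req -> False; '2.0' in req -> False
+    return '1.2.1' in req, '1.3' in req, '2.0' in req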
+
+
+def _override_setuptools(req):
+    """Return True when distribute wants to override a setuptools dependency.
+
+    We want to override when the requirement is setuptools and the version is
+    a variant of 0.6.
+
+    """
+    if req.project_name == 'setuptools':
+        if not len(req.specs):
+            # Just setuptools: ok
+            return True
+        for comparator, version in req.specs:
+            if comparator in ['==', '>=', '>']:
+                if '0.7' in version:
+                    # We want some setuptools not from the 0.6 series.
+                    return False
+        return True
+    return False
+
+
+def _get_mro(cls):
+    """Get an mro for a type or classic class"""
+    if not isinstance(cls,type):
+        class cls(cls,object): pass
+        return cls.__mro__[1:]
+    return cls.__mro__
+
+def _find_adapter(registry, ob):
+    """Return an adapter factory for `ob` from `registry`"""
+    for t in _get_mro(getattr(ob, '__class__', type(ob))):
+        if t in registry:
+            return registry[t]
+
+
+def ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section,content) pairs
+
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
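+
+# Editor's illustration (not part of distribute): splitting an
+# entry_points.txt-style string into (section, lines) pairs.
+def _example_split_sections():
+    """Return the sections parsed from a tiny sample string."""
+    text = "[console_scripts]\nfoo = example.module:main\n"
+    # -> [('console_scripts', ['foo = example.module:main'])]
+    return list(split_sections(text))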
+
+def _mkstemp(*args,**kw):
+    from tempfile import mkstemp
+    old_open = os.open
+    try:
+        os.open = os_open   # temporarily bypass sandboxing
+        return mkstemp(*args,**kw)
+    finally:
+        os.open = old_open  # and then put it back
+
+
+# Set up global resource manager (deliberately not state-saved)
+_manager = ResourceManager()
+def _initialize(g):
+    for name in dir(_manager):
+        if not name.startswith('_'):
+            g[name] = getattr(_manager, name)
+_initialize(globals())
+
+# Prepare the master working set and make the ``require()`` API available
+_declare_state('object', working_set = WorkingSet())
+
+try:
+    # Does the main program list any requirements?
+    from __main__ import __requires__
+except ImportError:
+    pass # No: just use the default working set based on sys.path
+else:
+    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
+    try:
+        working_set.require(__requires__)
+    except VersionConflict:     # try it without defaults already on sys.path
+        working_set = WorkingSet([])    # by starting with an empty path
+        for dist in working_set.resolve(
+            parse_requirements(__requires__), Environment()
+        ):
+            working_set.add(dist)
+        for entry in sys.path:  # add any missing entries from sys.path
+            if entry not in working_set.entries:
+                working_set.add_entry(entry)
+        sys.path[:] = working_set.entries   # then copy back to sys.path
+
+require = working_set.require
+iter_entry_points = working_set.iter_entry_points
+add_activation_listener = working_set.subscribe
+run_script = working_set.run_script
+run_main = run_script   # backward compatibility
+# Activate all distributions already on sys.path, and ensure that
+# all distributions added to the working set in the future (e.g. by
+# calling ``require()``) will get activated as well.
+add_activation_listener(lambda dist: dist.activate())
+working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
+
diff --git a/vendor/distribute-0.6.35/release.py b/vendor/distribute-0.6.35/release.py
new file mode 100644
index 0000000000000000000000000000000000000000..983703047fff06b6a41ed579125720709bb85f7c
--- /dev/null
+++ b/vendor/distribute-0.6.35/release.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+
+"""
+Script to fully automate the release process. Requires Python 2.6+
+with Sphinx installed and the 'hg' command on the path.
+"""
+
+from __future__ import print_function
+
+import subprocess
+import shutil
+import os
+import sys
+import urllib2
+import getpass
+import collections
+
+try:
+	import keyring
+except Exception:
+	pass
+
+VERSION = '0.6.35'
+
+def get_next_version():
+	digits = map(int, VERSION.split('.'))
+	digits[-1] += 1
+	return '.'.join(map(str, digits))
+
+NEXT_VERSION = get_next_version()
+
+files_with_versions = ('docs/conf.py', 'setup.py', 'release.py',
+	'README.txt', 'distribute_setup.py')
+
+def get_repo_name():
+	"""
+	Get the repo name from the hgrc default path.
+	"""
+	default = subprocess.check_output(['hg', 'paths', 'default']).strip()
+	parts = default.split('/')
+	if parts[-1] == '':
+		parts.pop()
+	return '/'.join(parts[-2:])
+
+def get_mercurial_creds(system='https://bitbucket.org', username=None):
+	"""
+	Return named tuple of username,password in much the same way that
+	Mercurial would (from the keyring).
+	"""
+	# todo: consider getting this from .hgrc
+	username = username or getpass.getuser()
+	keyring_username = '@@'.join((username, system))
+	system = '@'.join((keyring_username, 'Mercurial'))
+	password = (
+		keyring.get_password(system, keyring_username)
+		if 'keyring' in globals()
+		else None
+	)
+	if not password:
+		password = getpass.getpass()
+	Credential = collections.namedtuple('Credential', 'username password')
+	return Credential(username, password)
+
+def add_milestone_and_version(version=NEXT_VERSION):
+	auth = 'Basic ' + ':'.join(get_mercurial_creds()).encode('base64').strip()
+	headers = {
+		'Authorization': auth,
+		}
+	base = 'https://api.bitbucket.org'
+	for type in 'milestones', 'versions':
+		url = (base + '/1.0/repositories/{repo}/issues/{type}'
+			.format(repo = get_repo_name(), type=type))
+		req = urllib2.Request(url = url, headers = headers,
+			data='name='+version)
+		try:
+			urllib2.urlopen(req)
+		except urllib2.HTTPError as e:
+			print(e.fp.read())
+
+def bump_versions():
+	list(map(bump_version, files_with_versions))
+
+def bump_version(filename):
+	with open(filename, 'rb') as f:
+		lines = [line.replace(VERSION, NEXT_VERSION) for line in f]
+	with open(filename, 'wb') as f:
+		f.writelines(lines)
+
+def do_release():
+	assert all(map(os.path.exists, files_with_versions)), (
+		"Expected file(s) missing")
+
+	assert has_sphinx(), "You must have Sphinx installed to release"
+
+	res = raw_input('Have you read through the SCM changelog and '
+		'confirmed the changelog is current for releasing {VERSION}? '
+		.format(**globals()))
+	if not res.lower().startswith('y'):
+		print("Please do that")
+		raise SystemExit(1)
+
+	print("Travis-CI tests: http://travis-ci.org/#!/jaraco/distribute")
+	res = raw_input('Have you or has someone verified that the tests '
+		'pass on this revision? ')
+	if not res.lower().startswith('y'):
+		print("Please do that")
+		raise SystemExit(2)
+
+	subprocess.check_call(['hg', 'tag', VERSION])
+
+	subprocess.check_call(['hg', 'update', VERSION])
+
+	has_docs = build_docs()
+	if os.path.isdir('./dist'):
+		shutil.rmtree('./dist')
+	cmd = [sys.executable, 'setup.py', '-q', 'egg_info', '-RD', '-b', '',
+		'sdist', 'register', 'upload']
+	if has_docs:
+		cmd.append('upload_docs')
+	subprocess.check_call(cmd)
+	upload_bootstrap_script()
+
+	# update to the tip for the next operation
+	subprocess.check_call(['hg', 'update'])
+
+	# we just tagged the current version, bump for the next release.
+	bump_versions()
+	subprocess.check_call(['hg', 'ci', '-m',
+		'Bumped to {NEXT_VERSION} in preparation for next '
+		'release.'.format(**globals())])
+
+	# push the changes
+	subprocess.check_call(['hg', 'push'])
+
+	add_milestone_and_version()
+
+def has_sphinx():
+	try:
+		devnull = open(os.path.devnull, 'wb')
+		subprocess.Popen(['sphinx-build', '--version'], stdout=devnull,
+			stderr=subprocess.STDOUT).wait()
+	except Exception:
+		return False
+	return True
+
+def build_docs():
+	if not os.path.isdir('docs'):
+		return
+	if os.path.isdir('docs/build'):
+		shutil.rmtree('docs/build')
+	subprocess.check_call([
+		'sphinx-build',
+		'-b', 'html',
+		'-d', 'build/doctrees',
+		'.',
+		'build/html',
+		],
+		cwd='docs')
+	return True
+
+def upload_bootstrap_script():
+	scp_command = 'pscp' if sys.platform.startswith('win') else 'scp'
+	try:
+		subprocess.check_call([scp_command, 'distribute_setup.py',
+			'pypi@ziade.org:python-distribute.org/'])
+	except:
+		print("Unable to upload bootstrap script. Ask Tarek to do it.")
+
+if __name__ == '__main__':
+	do_release()
diff --git a/vendor/distribute-0.6.35/setup.cfg b/vendor/distribute-0.6.35/setup.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..319f941216a87c0e2d41f8ab7590d6420d85a75e
--- /dev/null
+++ b/vendor/distribute-0.6.35/setup.cfg
@@ -0,0 +1,21 @@
+[egg_info]
+tag_build = 
+tag_svn_revision = 0
+tag_date = 0
+
+[aliases]
+release = egg_info -RDb ''
+source = register sdist binary
+binary = bdist_egg upload --show-response
+
+[build_sphinx]
+source-dir = docs/
+build-dir = docs/build
+all_files = 1
+
+[upload_docs]
+upload-dir = docs/build/html
+
+[sdist]
+formats = gztar
+
diff --git a/vendor/distribute-0.6.35/setup.py b/vendor/distribute-0.6.35/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..cecb9e9fdad3476a7a4123d27d055696ab68975c
--- /dev/null
+++ b/vendor/distribute-0.6.35/setup.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python
+"""Distutils setup file, used to install or test 'setuptools'"""
+import sys
+import os
+import textwrap
+import re
+
+# Allow running setup.py from another directory.
+os.chdir(os.path.dirname(os.path.abspath(__file__)))
+
+src_root = None
+if sys.version_info >= (3,):
+    tmp_src = os.path.join("build", "src")
+    from distutils.filelist import FileList
+    from distutils import dir_util, file_util, util, log
+    log.set_verbosity(1)
+    fl = FileList()
+    manifest_file = open("MANIFEST.in")
+    for line in manifest_file:
+        fl.process_template_line(line)
+    manifest_file.close()
+    dir_util.create_tree(tmp_src, fl.files)
+    outfiles_2to3 = []
+    dist_script = os.path.join("build", "src", "distribute_setup.py")
+    for f in fl.files:
+        outf, copied = file_util.copy_file(f, os.path.join(tmp_src, f), update=1)
+        if copied and outf.endswith(".py") and outf != dist_script:
+            outfiles_2to3.append(outf)
+        if copied and outf.endswith('api_tests.txt'):
+            # XXX support this in distutils as well
+            from lib2to3.main import main
+            main('lib2to3.fixes', ['-wd', os.path.join(tmp_src, 'tests', 'api_tests.txt')])
+
+    util.run_2to3(outfiles_2to3)
+
+    # arrange setup to use the copy
+    sys.path.insert(0, os.path.abspath(tmp_src))
+    src_root = tmp_src
+
+from distutils.util import convert_path
+
+d = {}
+init_path = convert_path('setuptools/command/__init__.py')
+init_file = open(init_path)
+exec(init_file.read(), d)
+init_file.close()
+
+SETUP_COMMANDS = d['__all__']
+VERSION = "0.6.35"
+
+from setuptools import setup, find_packages
+from setuptools.command.build_py import build_py as _build_py
+from setuptools.command.test import test as _test
+
+scripts = []
+
+console_scripts = ["easy_install = setuptools.command.easy_install:main"]
+if os.environ.get("DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT") is None:
+    console_scripts.append("easy_install-%s = setuptools.command.easy_install:main" % sys.version[:3])
+
+# custom build_py that copies package data (including the Windows .exe files)
+class build_py(_build_py):
+    def build_package_data(self):
+        """Copy data files into build directory"""
+        lastdir = None
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                srcfile = os.path.join(src_dir, filename)
+                outf, copied = self.copy_file(srcfile, target)
+                srcfile = os.path.abspath(srcfile)
+
+                # avoid a bootstrapping issue with easy_install -U (when the
+                # previous version doesn't have convert_2to3_doctests)
+                if not hasattr(self.distribution, 'convert_2to3_doctests'):
+                    continue
+
+                if copied and srcfile in self.distribution.convert_2to3_doctests:
+                    self.__doctests_2to3.append(outf)
+
+class test(_test):
+    """Specific test class to avoid rewriting the entry_points.txt"""
+    def run(self):
+        entry_points = os.path.join('distribute.egg-info', 'entry_points.txt')
+
+        if not os.path.exists(entry_points):
+            _test.run(self)
+            return # even though _test.run will raise SystemExit
+
+        f = open(entry_points)
+
+        # running the test
+        try:
+            ep_content = f.read()
+        finally:
+            f.close()
+
+        try:
+            _test.run(self)
+        finally:
+            # restoring the file
+            f = open(entry_points, 'w')
+            try:
+                f.write(ep_content)
+            finally:
+                f.close()
+
+
+# if we are installing Distribute using "python setup.py install"
+# we need to get setuptools out of the way
+def _easy_install_marker():
+    return (len(sys.argv) == 5 and sys.argv[2] == 'bdist_egg' and
+            sys.argv[3] == '--dist-dir' and 'egg-dist-tmp-' in sys.argv[-1])
+
+def _buildout_marker():
+    command = os.environ.get('_')
+    if command:
+        return 'buildout' in os.path.basename(command)
+
+def _being_installed():
+    if os.environ.get('DONT_PATCH_SETUPTOOLS') is not None:
+        return False
+    if _buildout_marker():
+        # Installed by buildout, don't mess with a global setuptools.
+        return False
+    # easy_install marker
+    if "--help" in sys.argv[1:] or "-h" in sys.argv[1:]: # Don't bother doing anything if they're just asking for help
+        return False
+    return  'install' in sys.argv[1:] or _easy_install_marker()
+
+if _being_installed():
+    from distribute_setup import _before_install
+    _before_install()
+
+# Return the contents of a reStructuredText file with issue references linkified
+def _linkified(rst_path):
+    bitroot = 'http://bitbucket.org/tarek/distribute'
+    revision = re.compile(r'\b(issue\s+#?\d+)\b', re.M | re.I)
+
+    rst_file = open(rst_path)
+    rst_content = rst_file.read()
+    rst_file.close()
+
+    anchors = revision.findall(rst_content) # ['Issue #43', ...]
+    anchors = sorted(set(anchors))
+    rst_content = revision.sub(r'`\1`_', rst_content)
+    rst_content += "\n"
+    for x in anchors:
+        issue = re.findall(r'\d+', x)[0]
+        rst_content += '.. _`%s`: %s/issue/%s\n' % (x, bitroot, issue)
+    rst_content += "\n"
+    return rst_content
+
+readme_file = open('README.txt')
+long_description = readme_file.read() + _linkified('CHANGES.txt')
+readme_file.close()
+
+dist = setup(
+    name="distribute",
+    version=VERSION,
+    description="Easily download, build, install, upgrade, and uninstall "
+                "Python packages",
+    author="The fellowship of the packaging",
+    author_email="distutils-sig@python.org",
+    license="PSF or ZPL",
+    long_description = long_description,
+    keywords = "CPAN PyPI distutils eggs package management",
+    url = "http://packages.python.org/distribute",
+    test_suite = 'setuptools.tests',
+    src_root = src_root,
+    packages = find_packages(),
+    package_data = {'setuptools':['*.exe']},
+
+    py_modules = ['pkg_resources', 'easy_install', 'site'],
+
+    zip_safe = (sys.version>="2.5"),   # <2.5 needs unzipped for -m to work
+
+    cmdclass = {'test': test},
+    entry_points = {
+
+        "distutils.commands" : [
+            "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
+            for cmd in SETUP_COMMANDS
+        ],
+
+        "distutils.setup_keywords": [
+            "eager_resources        = setuptools.dist:assert_string_list",
+            "namespace_packages     = setuptools.dist:check_nsp",
+            "extras_require         = setuptools.dist:check_extras",
+            "install_requires       = setuptools.dist:check_requirements",
+            "tests_require          = setuptools.dist:check_requirements",
+            "entry_points           = setuptools.dist:check_entry_points",
+            "test_suite             = setuptools.dist:check_test_suite",
+            "zip_safe               = setuptools.dist:assert_bool",
+            "package_data           = setuptools.dist:check_package_data",
+            "exclude_package_data   = setuptools.dist:check_package_data",
+            "include_package_data   = setuptools.dist:assert_bool",
+            "packages               = setuptools.dist:check_packages",
+            "dependency_links       = setuptools.dist:assert_string_list",
+            "test_loader            = setuptools.dist:check_importable",
+            "use_2to3               = setuptools.dist:assert_bool",
+            "convert_2to3_doctests  = setuptools.dist:assert_string_list",
+            "use_2to3_fixers        = setuptools.dist:assert_string_list",
+            "use_2to3_exclude_fixers = setuptools.dist:assert_string_list",
+        ],
+
+        "egg_info.writers": [
+            "PKG-INFO = setuptools.command.egg_info:write_pkg_info",
+            "requires.txt = setuptools.command.egg_info:write_requirements",
+            "entry_points.txt = setuptools.command.egg_info:write_entries",
+            "eager_resources.txt = setuptools.command.egg_info:overwrite_arg",
+            "namespace_packages.txt = setuptools.command.egg_info:overwrite_arg",
+            "top_level.txt = setuptools.command.egg_info:write_toplevel_names",
+            "depends.txt = setuptools.command.egg_info:warn_depends_obsolete",
+            "dependency_links.txt = setuptools.command.egg_info:overwrite_arg",
+        ],
+
+        "console_scripts": console_scripts,
+
+        "setuptools.file_finders":
+            ["svn_cvs = setuptools.command.sdist:_default_revctrl"],
+
+        "setuptools.installation":
+            ['eggsecutable = setuptools.command.easy_install:bootstrap'],
+        },
+
+
+    classifiers = textwrap.dedent("""
+        Development Status :: 5 - Production/Stable
+        Intended Audience :: Developers
+        License :: OSI Approved :: Python Software Foundation License
+        License :: OSI Approved :: Zope Public License
+        Operating System :: OS Independent
+        Programming Language :: Python :: 2.4
+        Programming Language :: Python :: 2.5
+        Programming Language :: Python :: 2.6
+        Programming Language :: Python :: 2.7
+        Programming Language :: Python :: 3
+        Programming Language :: Python :: 3.1
+        Programming Language :: Python :: 3.2
+        Programming Language :: Python :: 3.3
+        Topic :: Software Development :: Libraries :: Python Modules
+        Topic :: System :: Archiving :: Packaging
+        Topic :: System :: Systems Administration
+        Topic :: Utilities
+        """).strip().splitlines(),
+    scripts = scripts,
+)
+
+if _being_installed():
+    from distribute_setup import _after_install
+    _after_install(dist)
diff --git a/vendor/distribute-0.6.35/setuptools/__init__.py b/vendor/distribute-0.6.35/setuptools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9de373f98e29100479609d7ea6f1bbcba1f4f22f
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/__init__.py
@@ -0,0 +1,104 @@
+"""Extensions to the 'distutils' for large or complex distributions"""
+from setuptools.extension import Extension, Library
+from setuptools.dist import Distribution, Feature, _get_unpatched
+import distutils.core, setuptools.command
+from setuptools.depends import Require
+from distutils.core import Command as _Command
+from distutils.util import convert_path
+import os
+import sys
+
+__version__ = '0.6'
+__all__ = [
+    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+    'find_packages'
+]
+
+# This marker is used to simplify the process that checks whether the
+# setuptools package was installed by the Setuptools project
+# or by the Distribute project, in case Setuptools creates
+# a distribution with the same version.
+#
+# The distribute_setup script for instance, will check if this
+# attribute is present to decide whether to reinstall the package
+# or not.
+_distribute = True
+
+bootstrap_install_from = None
+
+# If we run 2to3 on .py files, should we also convert docstrings?
+# Default: yes; assume that we can detect doctests reliably
+run_2to3_on_doctests = True
+# Standard package names for fixer packages
+lib2to3_fixer_packages = ['lib2to3.fixes']
+
+def find_packages(where='.', exclude=()):
+    """Return a list all Python packages found within directory 'where'
+
+    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
+    will be converted to the appropriate local path syntax.  'exclude' is a
+    sequence of package names to exclude; '*' can be used as a wildcard in the
+    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
+    'foo' itself).
+    """
+    out = []
+    stack=[(convert_path(where), '')]
+    while stack:
+        where,prefix = stack.pop(0)
+        for name in os.listdir(where):
+            fn = os.path.join(where,name)
+            if ('.' not in name and os.path.isdir(fn) and
+                os.path.isfile(os.path.join(fn,'__init__.py'))
+            ):
+                out.append(prefix+name); stack.append((fn,prefix+name+'.'))
+    for pat in list(exclude)+['ez_setup', 'distribute_setup']:
+        from fnmatch import fnmatchcase
+        out = [item for item in out if not fnmatchcase(item,pat)]
+    return out
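+
+# Usage sketch (hypothetical layout, for illustration only): with packages
+# 'foo', 'foo.bar' and 'tests' under the current directory,
+#
+#     find_packages(exclude=('tests', 'foo.*'))
+#
+# returns ['foo']: 'tests' is excluded by name, 'foo.bar' by the 'foo.*'
+# wildcard, and 'foo' itself is kept, as described in the docstring above.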
+
+setup = distutils.core.setup
+
+_Command = _get_unpatched(_Command)
+
+class Command(_Command):
+    __doc__ = _Command.__doc__
+
+    command_consumes_arguments = False
+
+    def __init__(self, dist, **kw):
+        # Add support for keyword arguments
+        _Command.__init__(self,dist)
+        for k,v in kw.items():
+            setattr(self,k,v)
+
+    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
+        for k,v in kw.items():
+            setattr(cmd,k,v)    # update command with keywords
+        return cmd
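+
+# Illustration of the keyword-argument support added above (option names are
+# hypothetical): both forms simply set attributes on the command instance,
+# which plain distutils Commands do not allow, e.g.
+#
+#     cmd = my_command(dist, some_option=1)
+#     cmd = self.reinitialize_command('build_ext', inplace=1)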
+
+import distutils.core
+distutils.core.Command = Command    # we can't patch distutils.cmd, alas
+
+def findall(dir = os.curdir):
+    """Find all files under 'dir' and return the list of full filenames
+    (relative to 'dir').
+    """
+    all_files = []
+    for base, dirs, files in os.walk(dir):
+        if base==os.curdir or base.startswith(os.curdir+os.sep):
+            base = base[2:]
+        if base:
+            files = [os.path.join(base, f) for f in files]
+        all_files.extend(filter(os.path.isfile, files))
+    return all_files
+
+import distutils.filelist
+distutils.filelist.findall = findall    # fix findall bug in distutils.
+
+# sys.dont_write_bytecode was introduced in Python 2.6.
+if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or
+    (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))):
+    _dont_write_bytecode = True
+else:
+    _dont_write_bytecode = False
diff --git a/vendor/distribute-0.6.35/setuptools/archive_util.py b/vendor/distribute-0.6.35/setuptools/archive_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..e22b25c00ddb7f0449054cfb5b78d8bc04695b26
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/archive_util.py
@@ -0,0 +1,214 @@
+"""Utilities for extracting common archive formats"""
+
+
+__all__ = [
+    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
+    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+]
+
+import zipfile, tarfile, os, shutil
+from pkg_resources import ensure_directory
+from distutils.errors import DistutilsError
+
+class UnrecognizedFormat(DistutilsError):
+    """Couldn't recognize the archive type"""
+
+def default_filter(src,dst):
+    """The default progress/filter callback; returns True for all files"""   
+    return dst
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+    drivers=None
+):
+    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
+
+    `progress_filter` is a function taking two arguments: a source path
+    internal to the archive ('/'-separated), and a filesystem path where it
+    will be extracted.  The callback must return the desired extract path
+    (which may be the same as the one passed in), or else ``None`` to skip
+    that file or directory.  The callback can thus be used to report on the
+    progress of the extraction, as well as to filter the items extracted or
+    alter their extraction paths.
+
+    `drivers`, if supplied, must be a non-empty sequence of functions with the
+    same signature as this function (minus the `drivers` argument), that raise
+    ``UnrecognizedFormat`` if they do not support extracting the designated
+    archive type.  The `drivers` are tried in sequence until one is found that
+    does not raise an error, or until all are exhausted (in which case
+    ``UnrecognizedFormat`` is raised).  If you do not supply a sequence of
+    drivers, the module's ``extraction_drivers`` constant will be used, which
+    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
+    order.
+    """
+    for driver in drivers or extraction_drivers:
+        try:
+            driver(filename, extract_dir, progress_filter)
+        except UnrecognizedFormat:
+            continue
+        else:
+            return
+    else:
+        raise UnrecognizedFormat(
+            "Not a recognized archive type: %s" % filename
+        )
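+
+# Sketch of a custom `progress_filter` (names are illustrative): the callback
+# may veto or rewrite each destination path, e.g. to skip metadata entries and
+# report progress while extracting:
+#
+#     def my_filter(src, dst):
+#         if src.startswith('EGG-INFO/'):
+#             return None           # skip this entry entirely
+#         print("extracting %s" % src)
+#         return dst                # extract to the default location
+#
+#     unpack_archive('some.egg', 'target_dir', progress_filter=my_filter)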
+
+
+
+
+
+
+
+def unpack_directory(filename, extract_dir, progress_filter=default_filter):
+    """"Unpack" a directory, using the same interface as for archives
+
+    Raises ``UnrecognizedFormat`` if `filename` is not a directory
+    """
+    if not os.path.isdir(filename):
+        raise UnrecognizedFormat("%s is not a directory" % (filename,))
+
+    paths = {filename:('',extract_dir)}
+    for base, dirs, files in os.walk(filename):
+        src,dst = paths[base]
+        for d in dirs:
+            paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
+        for f in files:
+            name = src+f
+            target = os.path.join(dst,f)
+            target = progress_filter(src+f, target)
+            if not target:
+                continue    # skip non-files
+            ensure_directory(target)
+            f = os.path.join(base,f)
+            shutil.copyfile(f, target)
+            shutil.copystat(f, target)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
+    """Unpack zip `filename` to `extract_dir`
+
+    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
+    by ``zipfile.is_zipfile()``).  See ``unpack_archive()`` for an explanation
+    of the `progress_filter` argument.
+    """
+
+    if not zipfile.is_zipfile(filename):
+        raise UnrecognizedFormat("%s is not a zip file" % (filename,))
+
+    z = zipfile.ZipFile(filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name:
+                continue
+
+            target = os.path.join(extract_dir, *name.split('/'))
+            target = progress_filter(name, target)
+            if not target:
+                continue
+            if name.endswith('/'):
+                # directory
+                ensure_directory(target)
+            else:
+                # file
+                ensure_directory(target)
+                data = z.read(info.filename)
+                f = open(target,'wb')
+                try:
+                    f.write(data)
+                finally:
+                    f.close()
+                    del data
+            unix_attributes = info.external_attr >> 16
+            if unix_attributes:
+                os.chmod(target, unix_attributes)
+    finally:
+        z.close()
+
+
+def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
+    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+
+    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
+    by ``tarfile.open()``).  See ``unpack_archive()`` for an explanation
+    of the `progress_filter` argument.
+    """
+
+    try:
+        tarobj = tarfile.open(filename)
+    except tarfile.TarError:
+        raise UnrecognizedFormat(
+            "%s is not a compressed or uncompressed tar file" % (filename,)
+        )
+
+    try:
+        tarobj.chown = lambda *args: None   # don't do any chowning!
+        for member in tarobj:
+            name = member.name
+            # don't extract absolute paths or ones with .. in them
+            if not name.startswith('/') and '..' not in name:
+                prelim_dst = os.path.join(extract_dir, *name.split('/'))
+                final_dst = progress_filter(name, prelim_dst)
+                # If progress_filter returns None, then we do not extract
+                # this file
+                # TODO: Do we really need to limit to just these file types?
+                # tarobj.extract() will handle all files on all platforms,
+                # turning file types that aren't allowed on that platform into
+                # regular files.
+                if final_dst and (member.isfile() or member.isdir() or
+                        member.islnk() or member.issym()):
+                    tarobj.extract(member, extract_dir)
+                    if final_dst != prelim_dst:
+                        shutil.move(prelim_dst, final_dst)
+        return True
+    finally:
+        tarobj.close()
+
+
+
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
+
+
+
+
+
diff --git a/vendor/distribute-0.6.35/setuptools/cli-32.exe b/vendor/distribute-0.6.35/setuptools/cli-32.exe
new file mode 100755
index 0000000000000000000000000000000000000000..9b7717b78bbf71f105ccde26746a0f6e3a4d12db
Binary files /dev/null and b/vendor/distribute-0.6.35/setuptools/cli-32.exe differ
diff --git a/vendor/distribute-0.6.35/setuptools/cli-64.exe b/vendor/distribute-0.6.35/setuptools/cli-64.exe
new file mode 100755
index 0000000000000000000000000000000000000000..265585afc4042ce55c59d28ef1aab37f0a68ecdc
Binary files /dev/null and b/vendor/distribute-0.6.35/setuptools/cli-64.exe differ
diff --git a/vendor/distribute-0.6.35/setuptools/cli.exe b/vendor/distribute-0.6.35/setuptools/cli.exe
new file mode 100755
index 0000000000000000000000000000000000000000..9b7717b78bbf71f105ccde26746a0f6e3a4d12db
Binary files /dev/null and b/vendor/distribute-0.6.35/setuptools/cli.exe differ
diff --git a/vendor/distribute-0.6.35/setuptools/command/__init__.py b/vendor/distribute-0.6.35/setuptools/command/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b063fa192574da8f4b6fd20a6257164075481031
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/__init__.py
@@ -0,0 +1,21 @@
+__all__ = [
+    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
+    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
+    'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
+    'register', 'bdist_wininst', 'upload_docs',
+]
+
+from setuptools.command import install_scripts
+import sys
+
+if sys.version>='2.5':
+    # In Python 2.5 and above, distutils includes its own upload command
+    __all__.remove('upload')
+
+from distutils.command.bdist import bdist
+
+if 'egg' not in bdist.format_commands:
+    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+    bdist.format_commands.append('egg')
+
+del bdist, sys
diff --git a/vendor/distribute-0.6.35/setuptools/command/alias.py b/vendor/distribute-0.6.35/setuptools/command/alias.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5368b29e9d705f6dc317e38d71ad2c03668df66
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/alias.py
@@ -0,0 +1,82 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+def shquote(arg):
+    """Quote an argument for later parsing by shlex.split()"""
+    for c in '"', "'", "\\", "#":
+        if c in arg: return repr(arg)
+    if arg.split()<>[arg]:
+        return repr(arg)
+    return arg        
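+
+# For illustration: arguments containing whitespace or shell-special
+# characters come back repr()-quoted so shlex.split() can round-trip them,
+# e.g. shquote("develop") == "develop" but
+# shquote("install --prefix=/tmp/my dir") == "'install --prefix=/tmp/my dir'".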
+
+
+class alias(option_base):
+    """Define a shortcut that invokes one or more commands"""
+    
+    description = "define a shortcut to invoke one or more commands"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('remove',   'r', 'remove (unset) the alias'), 
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.args = None
+        self.remove = None
+
+    def finalize_options(self):
+        option_base.finalize_options(self)
+        if self.remove and len(self.args)<>1:
+            raise DistutilsOptionError(
+                "Must specify exactly one argument (the alias name) when "
+                "using --remove"
+            )
+
+    def run(self):
+        aliases = self.distribution.get_option_dict('aliases')
+
+        if not self.args:
+            print "Command Aliases"
+            print "---------------"
+            for alias in aliases:
+                print "setup.py alias", format_alias(alias, aliases)
+            return
+
+        elif len(self.args)==1:
+            alias, = self.args
+            if self.remove:
+                command = None
+            elif alias in aliases:
+                print "setup.py alias", format_alias(alias, aliases)
+                return
+            else:
+                print "No alias definition found for %r" % alias
+                return
+        else:
+            alias = self.args[0]
+            command = ' '.join(map(shquote,self.args[1:]))
+
+        edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+    source, command = aliases[name]
+    if source == config_file('global'):
+        source = '--global-config '
+    elif source == config_file('user'):
+        source = '--user-config '
+    elif source == config_file('local'):
+        source = ''
+    else:
+        source = '--filename=%r' % source
+    return source+name+' '+command
+            
+
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/bdist_egg.py b/vendor/distribute-0.6.35/setuptools/command/bdist_egg.py
new file mode 100644
index 0000000000000000000000000000000000000000..17fae984a7cca21ec2c1c0f01a146565f4cbdf93
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/bdist_egg.py
@@ -0,0 +1,548 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+# This module should be kept compatible with Python 2.3
+import sys, os, marshal
+from setuptools import Command
+from distutils.dir_util import remove_tree, mkpath
+try:
+    from distutils.sysconfig import get_python_version, get_python_lib
+except ImportError:
+    from sysconfig import get_python_version
+    from distutils.sysconfig import get_python_lib
+
+from distutils import log
+from distutils.errors import DistutilsSetupError
+from pkg_resources import get_build_platform, Distribution, ensure_directory
+from pkg_resources import EntryPoint
+from types import CodeType
+from setuptools.extension import Library
+
+def strip_module(filename):
+    if '.' in filename:
+        filename = os.path.splitext(filename)[0]
+    if filename.endswith('module'):
+        filename = filename[:-6]
+    return filename
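+
+# e.g. strip_module('foomodule.so') -> 'foo' and strip_module('_speedups.pyd')
+# -> '_speedups': the extension is dropped, then a trailing 'module' suffix,
+# if any.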
+
+def write_stub(resource, pyfile):
+    f = open(pyfile,'w')
+    f.write('\n'.join([
+        "def __bootstrap__():",
+        "   global __bootstrap__, __loader__, __file__",
+        "   import sys, pkg_resources, imp",
+        "   __file__ = pkg_resources.resource_filename(__name__,%r)"
+            % resource,
+        "   __loader__ = None; del __bootstrap__, __loader__",
+        "   imp.load_dynamic(__name__,__file__)",
+        "__bootstrap__()",
+        "" # terminal \n
+    ]))
+    f.close()
+
+# stub __init__.py for packages distributed without one
+NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
+
+class bdist_egg(Command):
+
+    description = "create an \"egg\" distribution"
+
+    user_options = [
+        ('bdist-dir=', 'b',
+            "temporary directory for creating the distribution"),
+        ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_build_platform()),
+        ('exclude-source-files', None,
+                     "remove all .py files from the generated egg"),
+        ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+        ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+        ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+    ]
+
+    boolean_options = [
+        'keep-temp', 'skip-build', 'exclude-source-files'
+    ]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = 0
+        self.dist_dir = None
+        self.skip_build = 0
+        self.egg_output = None
+        self.exclude_source_files = None
+
+
+    def finalize_options(self):
+        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
+        self.egg_info = ei_cmd.egg_info
+
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+        if self.plat_name is None:
+            self.plat_name = get_build_platform()
+
+        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+
+        if self.egg_output is None:
+
+            # Compute filename of the output egg
+            basename = Distribution(
+                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
+                get_python_version(),
+                self.distribution.has_ext_modules() and self.plat_name
+            ).egg_name()
+
+            self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
+
+
+
+
+
+
+
+
+    def do_install_data(self):
+        # Hack for packages that install data to install's --install-lib
+        self.get_finalized_command('install').install_lib = self.bdist_dir
+
+        site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
+        old, self.distribution.data_files = self.distribution.data_files,[]
+
+        for item in old:
+            if isinstance(item,tuple) and len(item)==2:
+                if os.path.isabs(item[0]):
+                    realpath = os.path.realpath(item[0])
+                    normalized = os.path.normcase(realpath)
+                    if normalized==site_packages or normalized.startswith(
+                        site_packages+os.sep
+                    ):
+                        item = realpath[len(site_packages)+1:], item[1]
+                    # XXX else: raise ???
+            self.distribution.data_files.append(item)
+
+        try:
+            log.info("installing package data to %s" % self.bdist_dir)
+            self.call_command('install_data', force=0, root=None)
+        finally:
+            self.distribution.data_files = old
+
+
+    def get_outputs(self):
+        return [self.egg_output]
+
+
+    def call_command(self,cmdname,**kw):
+        """Invoke reinitialized command `cmdname` with keyword args"""
+        for dirname in INSTALL_DIRECTORY_ATTRS:
+            kw.setdefault(dirname,self.bdist_dir)
+        kw.setdefault('skip_build',self.skip_build)
+        kw.setdefault('dry_run', self.dry_run)
+        cmd = self.reinitialize_command(cmdname, **kw)
+        self.run_command(cmdname)
+        return cmd
+
+
+    def run(self):
+        # Generate metadata first
+        self.run_command("egg_info")
+
+        # We run install_lib before install_data, because some data hacks
+        # pull their data path from the install_lib command.
+        log.info("installing library code to %s" % self.bdist_dir)
+        instcmd = self.get_finalized_command('install')
+        old_root = instcmd.root; instcmd.root = None
+        cmd = self.call_command('install_lib', warn_dir=0)
+        instcmd.root = old_root
+
+        all_outputs, ext_outputs = self.get_ext_outputs()
+        self.stubs = []
+        to_compile = []
+        for (p,ext_name) in enumerate(ext_outputs):
+            filename,ext = os.path.splitext(ext_name)
+            pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
+            self.stubs.append(pyfile)
+            log.info("creating stub loader for %s" % ext_name)
+            if not self.dry_run:
+                write_stub(os.path.basename(ext_name), pyfile)
+            to_compile.append(pyfile)
+            ext_outputs[p] = ext_name.replace(os.sep,'/')
+
+        to_compile.extend(self.make_init_files())
+        if to_compile:
+            cmd.byte_compile(to_compile)
+
+        if self.distribution.data_files:
+            self.do_install_data()
+
+        # Make the EGG-INFO directory
+        archive_root = self.bdist_dir
+        egg_info = os.path.join(archive_root,'EGG-INFO')
+        self.mkpath(egg_info)
+        if self.distribution.scripts:
+            script_dir = os.path.join(egg_info, 'scripts')
+            log.info("installing scripts to %s" % script_dir)
+            self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
+
+        self.copy_metadata_to(egg_info)
+        native_libs = os.path.join(egg_info, "native_libs.txt")
+        if all_outputs:
+            log.info("writing %s" % native_libs)
+            if not self.dry_run:
+                ensure_directory(native_libs)
+                libs_file = open(native_libs, 'wt')
+                libs_file.write('\n'.join(all_outputs))
+                libs_file.write('\n')
+                libs_file.close()
+        elif os.path.isfile(native_libs):
+            log.info("removing %s" % native_libs)
+            if not self.dry_run:
+                os.unlink(native_libs)
+
+        write_safety_flag(
+            os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
+        )
+
+        if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
+            log.warn(
+                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+                "Use the install_requires/extras_require setup() args instead."
+            )
+
+        if self.exclude_source_files:
+            self.zap_pyfiles()
+
+        # Make the archive
+        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
+                          dry_run=self.dry_run, mode=self.gen_header())
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution,'dist_files',[]).append(
+            ('bdist_egg',get_python_version(),self.egg_output))
+
+
+
+
+    def zap_pyfiles(self):
+        log.info("Removing .py files from temporary directory")
+        for base,dirs,files in walk_egg(self.bdist_dir):
+            for name in files:
+                if name.endswith('.py'):
+                    path = os.path.join(base,name)
+                    log.debug("Deleting %s", path)
+                    os.unlink(path)
+
+    def zip_safe(self):
+        safe = getattr(self.distribution,'zip_safe',None)
+        if safe is not None:
+            return safe
+        log.warn("zip_safe flag not set; analyzing archive contents...")
+        return analyze_egg(self.bdist_dir, self.stubs)
+
+    def make_init_files(self):
+        """Create missing package __init__ files"""
+        init_files = []
+        for base,dirs,files in walk_egg(self.bdist_dir):
+            if base==self.bdist_dir:
+                # don't put an __init__ in the root
+                continue
+            for name in files:
+                if name.endswith('.py'):
+                    if '__init__.py' not in files:
+                        pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
+                        if self.distribution.has_contents_for(pkg):
+                            log.warn("Creating missing __init__.py for %s",pkg)
+                            filename = os.path.join(base,'__init__.py')
+                            if not self.dry_run:
+                                f = open(filename,'w'); f.write(NS_PKG_STUB)
+                                f.close()
+                            init_files.append(filename)
+                    break
+            else:
+                # not a package, don't traverse to subdirectories
+                dirs[:] = []
+
+        return init_files
+
+    def gen_header(self):
+        epm = EntryPoint.parse_map(self.distribution.entry_points or '')
+        ep = epm.get('setuptools.installation',{}).get('eggsecutable')
+        if ep is None:
+            return 'w'  # not an eggsecutable, do it the usual way.
+
+        if not ep.attrs or ep.extras:
+            raise DistutilsSetupError(
+                "eggsecutable entry point (%r) cannot have 'extras' "
+                "or refer to a module" % (ep,)
+            )
+
+        pyver = sys.version[:3]
+        pkg = ep.module_name
+        full = '.'.join(ep.attrs)
+        base = ep.attrs[0]
+        basename = os.path.basename(self.egg_output)
+
+        header = (
+            "#!/bin/sh\n"
+            'if [ `basename $0` = "%(basename)s" ]\n'
+            'then exec python%(pyver)s -c "'
+            "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
+            "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
+            '" "$@"\n'
+            'else\n'
+            '  echo $0 is not the correct name for this egg file.\n'
+            '  echo Please rename it back to %(basename)s and try again.\n'
+            '  exec false\n'
+            'fi\n'
+
+        ) % locals()
+
+        if not self.dry_run:
+            mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
+            f = open(self.egg_output, 'w')
+            f.write(header)
+            f.close()
+        return 'a'
+
+
+    def copy_metadata_to(self, target_dir):
+        "Copy metadata (egg info) to the target_dir"
+        # normalize the path (so that a forward-slash in egg_info will
+        # match using startswith below)
+        norm_egg_info = os.path.normpath(self.egg_info)
+        prefix = os.path.join(norm_egg_info,'')
+        for path in self.ei_cmd.filelist.files:
+            if path.startswith(prefix):
+                target = os.path.join(target_dir, path[len(prefix):])
+                ensure_directory(target)
+                self.copy_file(path, target)
+
+    def get_ext_outputs(self):
+        """Get a list of relative paths to C extensions in the output distro"""
+
+        all_outputs = []
+        ext_outputs = []
+
+        paths = {self.bdist_dir:''}
+        for base, dirs, files in os.walk(self.bdist_dir):
+            for filename in files:
+                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
+                    all_outputs.append(paths[base]+filename)
+            for filename in dirs:
+                paths[os.path.join(base,filename)] = paths[base]+filename+'/'
+
+        if self.distribution.has_ext_modules():
+            build_cmd = self.get_finalized_command('build_ext')
+            for ext in build_cmd.extensions:
+                if isinstance(ext,Library):
+                    continue
+                fullname = build_cmd.get_ext_fullname(ext.name)
+                filename = build_cmd.get_ext_filename(fullname)
+                if not os.path.basename(filename).startswith('dl-'):
+                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
+                        ext_outputs.append(filename)
+
+        return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+
+
+def walk_egg(egg_dir):
+    """Walk an unpacked egg's contents, skipping the metadata directory"""
+    walker = os.walk(egg_dir)
+    base,dirs,files = walker.next()
+    if 'EGG-INFO' in dirs:
+        dirs.remove('EGG-INFO')
+    yield base,dirs,files
+    for bdf in walker:
+        yield bdf
+
+def analyze_egg(egg_dir, stubs):
+    # check for existing flag in EGG-INFO
+    for flag,fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
+            return flag
+    if not can_scan(): return False
+    safe = True
+    for base, dirs, files in walk_egg(egg_dir):
+        for name in files:
+            if name.endswith('.py') or name.endswith('.pyw'):
+                continue
+            elif name.endswith('.pyc') or name.endswith('.pyo'):
+                # always scan, even if we already know we're not safe
+                safe = scan_module(egg_dir, base, name, stubs) and safe
+    return safe
+
+def write_safety_flag(egg_dir, safe):
+    # Write or remove zip safety flag file(s)
+    for flag,fn in safety_flags.items():
+        fn = os.path.join(egg_dir, fn)
+        if os.path.exists(fn):
+            if safe is None or bool(safe)<>flag:
+                os.unlink(fn)
+        elif safe is not None and bool(safe)==flag:
+            f=open(fn,'wt'); f.write('\n'); f.close()
+
+safety_flags = {
+    True: 'zip-safe',
+    False: 'not-zip-safe',
+}
+
+def scan_module(egg_dir, base, name, stubs):
+    """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+    filename = os.path.join(base,name)
+    if filename[:-1] in stubs:
+        return True     # Extension module
+    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
+    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
+    if sys.version_info < (3, 3):
+        skip = 8   # skip magic & date
+    else:
+        skip = 12  # skip magic & date & file size
+    f = open(filename,'rb'); f.read(skip)
+    code = marshal.load(f); f.close()
+    safe = True
+    symbols = dict.fromkeys(iter_symbols(code))
+    for bad in ['__file__', '__path__']:
+        if bad in symbols:
+            log.warn("%s: module references %s", module, bad)
+            safe = False
+    if 'inspect' in symbols:
+        for bad in [
+            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
+            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+            'getinnerframes', 'getouterframes', 'stack', 'trace'
+        ]:
+            if bad in symbols:
+                log.warn("%s: module MAY be using inspect.%s", module, bad)
+                safe = False
+    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
+        if sys.version[:3]=="2.4":  # -m works w/zipfiles in 2.5
+            log.warn("%s: top-level module may be 'python -m' script", module)
+            safe = False
+    return safe
+
+def iter_symbols(code):
+    """Yield names and strings used by `code` and its nested code objects"""
+    for name in code.co_names: yield name
+    for const in code.co_consts:
+        if isinstance(const,basestring):
+            yield const
+        elif isinstance(const,CodeType):
+            for name in iter_symbols(const):
+                yield name
+
+def can_scan():
+    if not sys.platform.startswith('java') and sys.platform != 'cli':
+        # CPython, PyPy, etc.
+        return True
+    log.warn("Unable to analyze compiled code on this platform.")
+    log.warn("Please ask the author to include a 'zip_safe'"
+             " setting (either True or False) in the package's setup.py")
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+    'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
+    mode='w'
+):
+    """Create a zip file from all the files under 'base_dir'.  The output
+    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
+    Python module (if available) or the InfoZIP "zip" utility (if installed
+    and found on the default search path).  If neither tool is available,
+    raises DistutilsExecError.  Returns the name of the output zip file.
+    """
+    import zipfile
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+    def visit(z, dirname, names):
+        for name in names:
+            path = os.path.normpath(os.path.join(dirname, name))
+            if os.path.isfile(path):
+                p = path[len(base_dir)+1:]
+                if not dry_run:
+                    z.write(path, p)
+                log.debug("adding '%s'" % p)
+
+    if compress is None:
+        compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
+
+    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
+    if not dry_run:
+        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+        for dirname, dirs, files in os.walk(base_dir):
+            visit(z, dirname, files)
+        z.close()
+    else:
+        for dirname, dirs, files in os.walk(base_dir):
+            visit(None, dirname, files)
+    return zip_filename
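+
+# Typical call from bdist_egg.run() above (paths are illustrative): the
+# archive root is the temporary build tree and the mode comes from
+# gen_header(), e.g.
+#
+#     make_zipfile('dist/foo-1.0-py2.7.egg', 'build/bdist.linux-x86_64/egg',
+#                  dry_run=0, mode='w')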
+#
diff --git a/vendor/distribute-0.6.35/setuptools/command/bdist_rpm.py b/vendor/distribute-0.6.35/setuptools/command/bdist_rpm.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c48da35591037462d40d15adb96bdeb4351d30f
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/bdist_rpm.py
@@ -0,0 +1,82 @@
+# This is just a kludge so that bdist_rpm doesn't guess wrong about the
+# distribution name and version, if the egg_info command is going to alter
+# them, another kludge to allow you to build old-style non-egg RPMs, and
+# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
+
+from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
+import sys, os
+
+class bdist_rpm(_bdist_rpm):
+
+    def initialize_options(self):
+        _bdist_rpm.initialize_options(self)
+        self.no_egg = None
+
+    if sys.version<"2.5":
+        # Track for uploading any .rpm file(s) moved to self.dist_dir
+        def move_file(self, src, dst, level=1):
+            _bdist_rpm.move_file(self, src, dst, level)
+            if dst==self.dist_dir and src.endswith('.rpm'):
+                getattr(self.distribution,'dist_files',[]).append(
+                    ('bdist_rpm',
+                    src.endswith('.src.rpm') and 'any' or sys.version[:3],
+                     os.path.join(dst, os.path.basename(src)))
+                )
+
+    def run(self):
+        self.run_command('egg_info')    # ensure distro name is up-to-date
+        _bdist_rpm.run(self)
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def _make_spec_file(self):
+        version = self.distribution.get_version()
+        rpmversion = version.replace('-','_')
+        spec = _bdist_rpm._make_spec_file(self)
+        line23 = '%define version '+version
+        line24 = '%define version '+rpmversion
+        spec  = [
+            line.replace(
+                "Source0: %{name}-%{version}.tar",
+                "Source0: %{name}-%{unmangled_version}.tar"
+            ).replace(
+                "setup.py install ",
+                "setup.py install --single-version-externally-managed "
+            ).replace(
+                "%setup",
+                "%setup -n %{name}-%{unmangled_version}"
+            ).replace(line23,line24)
+            for line in spec
+        ]
+        spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
+        return spec
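+
+    # For example (hypothetical version string): a distribution version of
+    # '1.0-rc1' yields rpmversion '1.0_rc1'; the generated spec then carries
+    # '%define version 1.0_rc1' plus an added '%define unmangled_version
+    # 1.0-rc1' line, and Source0/%setup refer to %{unmangled_version}.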
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/bdist_wininst.py b/vendor/distribute-0.6.35/setuptools/command/bdist_wininst.py
new file mode 100644
index 0000000000000000000000000000000000000000..93e6846d79c92a461e0a84b0fd8c23fd5e9dfee3
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/bdist_wininst.py
@@ -0,0 +1,41 @@
+from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
+import os, sys
+
+class bdist_wininst(_bdist_wininst):
+
+    def create_exe(self, arcname, fullname, bitmap=None):
+        _bdist_wininst.create_exe(self, arcname, fullname, bitmap)
+        dist_files = getattr(self.distribution, 'dist_files', [])
+
+        if self.target_version:
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.win32-py%s.exe" %
+                                           (fullname, self.target_version))
+            pyversion = self.target_version
+
+            # fix 2.5 bdist_wininst ignoring --target-version spec
+            bad = ('bdist_wininst','any',installer_name)
+            if bad in dist_files:
+                dist_files.remove(bad)
+        else:
+            installer_name = os.path.join(self.dist_dir,
+                                          "%s.win32.exe" % fullname)
+            pyversion = 'any'
+        good = ('bdist_wininst', pyversion, installer_name)
+        if good not in dist_files:
+            dist_files.append(good)
+
+    def reinitialize_command(self, command, reinit_subcommands=0):
+        cmd = self.distribution.reinitialize_command(
+            command, reinit_subcommands)
+        if command in ('install', 'install_lib'):
+            cmd.install_lib = None  # work around distutils bug
+        return cmd
+
+    def run(self):
+        self._is_running = True
+        try:
+            _bdist_wininst.run(self)
+        finally:
+            self._is_running = False
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/build_ext.py b/vendor/distribute-0.6.35/setuptools/command/build_ext.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a94572cbd60333a7d4c8fbb1c34aa2961f552e3
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/build_ext.py
@@ -0,0 +1,294 @@
+from distutils.command.build_ext import build_ext as _du_build_ext
+try:
+    # Attempt to use Pyrex for building extensions, if available
+    from Pyrex.Distutils.build_ext import build_ext as _build_ext
+except ImportError:
+    _build_ext = _du_build_ext
+
+import os, sys
+from distutils.file_util import copy_file
+from setuptools.extension import Library
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+get_config_var("LDSHARED")  # make sure _config_vars is initialized
+from distutils.sysconfig import _config_vars
+from distutils import log
+from distutils.errors import *
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+    use_stubs = True
+elif os.name != 'nt':
+    try:
+        from dl import RTLD_NOW
+        have_rtld = True
+        use_stubs = True
+    except ImportError:
+        pass
+
+def if_dl(s):
+    if have_rtld:
+        return s
+    return ''
+
+
+
+
+
+
+class build_ext(_build_ext):
+    def run(self):
+        """Build extensions in build directory, then copy if --inplace"""
+        old_inplace, self.inplace = self.inplace, 0
+        _build_ext.run(self)
+        self.inplace = old_inplace
+        if old_inplace:
+            self.copy_extensions_to_source()
+
+    def copy_extensions_to_source(self):
+        build_py = self.get_finalized_command('build_py')
+        for ext in self.extensions:
+            fullname = self.get_ext_fullname(ext.name)
+            filename = self.get_ext_filename(fullname)
+            modpath = fullname.split('.')
+            package = '.'.join(modpath[:-1])
+            package_dir = build_py.get_package_dir(package)
+            dest_filename = os.path.join(package_dir,os.path.basename(filename))
+            src_filename = os.path.join(self.build_lib,filename)
+
+            # Always copy, even if source is older than destination, to ensure
+            # that the right extensions for the current Python/platform are
+            # used.
+            copy_file(
+                src_filename, dest_filename, verbose=self.verbose,
+                dry_run=self.dry_run
+            )
+            if ext._needs_stub:
+                self.write_stub(package_dir or os.curdir, ext, True)
+
+
+    if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
+        # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
+        def swig_sources(self, sources, *otherargs):
+            # first do any Pyrex processing
+            sources = _build_ext.swig_sources(self, sources) or sources
+            # Then do any actual SWIG stuff on the remainder
+            return _du_build_ext.swig_sources(self, sources, *otherargs)
+
+
+
+    def get_ext_filename(self, fullname):
+        filename = _build_ext.get_ext_filename(self,fullname)
+        if fullname not in self.ext_map:
+            return filename
+        ext = self.ext_map[fullname]
+        if isinstance(ext,Library):
+            fn, ext = os.path.splitext(filename)
+            return self.shlib_compiler.library_filename(fn,libtype)
+        elif use_stubs and ext._links_to_dynamic:
+            d,fn = os.path.split(filename)
+            return os.path.join(d,'dl-'+fn)
+        else:
+            return filename
+
+    def initialize_options(self):
+        _build_ext.initialize_options(self)
+        self.shlib_compiler = None
+        self.shlibs = []
+        self.ext_map = {}
+
+    def finalize_options(self):
+        _build_ext.finalize_options(self)
+        self.extensions = self.extensions or []
+        self.check_extensions_list(self.extensions)
+        self.shlibs = [ext for ext in self.extensions
+                        if isinstance(ext,Library)]
+        if self.shlibs:
+            self.setup_shlib_compiler()
+        for ext in self.extensions:
+            ext._full_name = self.get_ext_fullname(ext.name)
+        for ext in self.extensions:
+            fullname = ext._full_name
+            self.ext_map[fullname] = ext
+
+            # distutils 3.1 will also ask for module names
+            # XXX what to do with conflicts?
+            self.ext_map[fullname.split('.')[-1]] = ext
+
+            ltd = ext._links_to_dynamic = \
+                self.shlibs and self.links_to_dynamic(ext) or False
+            ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
+            filename = ext._file_name = self.get_ext_filename(fullname)
+            libdir = os.path.dirname(os.path.join(self.build_lib,filename))
+            if ltd and libdir not in ext.library_dirs:
+                ext.library_dirs.append(libdir)
+            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+                ext.runtime_library_dirs.append(os.curdir)
+
+    def setup_shlib_compiler(self):
+        compiler = self.shlib_compiler = new_compiler(
+            compiler=self.compiler, dry_run=self.dry_run, force=self.force
+        )
+        if sys.platform == "darwin":
+            tmp = _config_vars.copy()
+            try:
+                # XXX Help!  I don't have any idea whether these are right...
+                _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
+                _config_vars['CCSHARED'] = " -dynamiclib"
+                _config_vars['SO'] = ".dylib"
+                customize_compiler(compiler)
+            finally:
+                _config_vars.clear()
+                _config_vars.update(tmp)
+        else:
+            customize_compiler(compiler)
+
+        if self.include_dirs is not None:
+            compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for (name,value) in self.define:
+                compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                compiler.undefine_macro(macro)
+        if self.libraries is not None:
+            compiler.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            compiler.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            compiler.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            compiler.set_link_objects(self.link_objects)
+
+        # hack so distutils' build_extension() builds a library instead
+        compiler.link_shared_object = link_shared_object.__get__(compiler)
+
+
+
+    def get_export_symbols(self, ext):
+        if isinstance(ext,Library):
+            return ext.export_symbols
+        return _build_ext.get_export_symbols(self,ext)
+
+    def build_extension(self, ext):
+        _compiler = self.compiler
+        try:
+            if isinstance(ext,Library):
+                self.compiler = self.shlib_compiler
+            _build_ext.build_extension(self,ext)
+            if ext._needs_stub:
+                self.write_stub(
+                    self.get_finalized_command('build_py').build_lib, ext
+                )
+        finally:
+            self.compiler = _compiler
+
+    def links_to_dynamic(self, ext):
+        """Return true if 'ext' links to a dynamic lib in the same package"""
+        # XXX this should check to ensure the lib is actually being built
+        # XXX as dynamic, and not just using a locally-found version or a
+        # XXX static-compiled version
+        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+        pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
+        for libname in ext.libraries:
+            if pkg+libname in libnames: return True
+        return False
+
+    def get_outputs(self):
+        outputs = _build_ext.get_outputs(self)
+        optimize = self.get_finalized_command('build_py').optimize
+        for ext in self.extensions:
+            if ext._needs_stub:
+                base = os.path.join(self.build_lib, *ext._full_name.split('.'))
+                outputs.append(base+'.py')
+                outputs.append(base+'.pyc')
+                if optimize:
+                    outputs.append(base+'.pyo')
+        return outputs
+
+    def write_stub(self, output_dir, ext, compile=False):
+        log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
+        stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
+        if compile and os.path.exists(stub_file):
+            raise DistutilsError(stub_file+" already exists! Please delete.")
+        if not self.dry_run:
+            f = open(stub_file,'w')
+            f.write('\n'.join([
+                "def __bootstrap__():",
+                "   global __bootstrap__, __file__, __loader__",
+                "   import sys, os, pkg_resources, imp"+if_dl(", dl"),
+                "   __file__ = pkg_resources.resource_filename(__name__,%r)"
+                   % os.path.basename(ext._file_name),
+                "   del __bootstrap__",
+                "   if '__loader__' in globals():",
+                "       del __loader__",
+                if_dl("   old_flags = sys.getdlopenflags()"),
+                "   old_dir = os.getcwd()",
+                "   try:",
+                "     os.chdir(os.path.dirname(__file__))",
+                if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
+                "     imp.load_dynamic(__name__,__file__)",
+                "   finally:",
+                if_dl("     sys.setdlopenflags(old_flags)"),
+                "     os.chdir(old_dir)",
+                "__bootstrap__()",
+                "" # terminal \n
+            ]))
+            f.close()
+        if compile:
+            from distutils.util import byte_compile
+            byte_compile([stub_file], optimize=0,
+                         force=True, dry_run=self.dry_run)
+            optimize = self.get_finalized_command('install_lib').optimize
+            if optimize > 0:
+                byte_compile([stub_file], optimize=optimize,
+                             force=True, dry_run=self.dry_run)
+            if os.path.exists(stub_file) and not self.dry_run:
+                os.unlink(stub_file)
+
+
+if use_stubs or os.name=='nt':
+    # Build shared libraries
+    #
+    def link_shared_object(self, objects, output_libname, output_dir=None,
+        libraries=None, library_dirs=None, runtime_library_dirs=None,
+        export_symbols=None, debug=0, extra_preargs=None,
+        extra_postargs=None, build_temp=None, target_lang=None
+    ):  self.link(
+            self.SHARED_LIBRARY, objects, output_libname,
+            output_dir, libraries, library_dirs, runtime_library_dirs,
+            export_symbols, debug, extra_preargs, extra_postargs,
+            build_temp, target_lang
+        )
+else:
+    # Build static libraries everywhere else
+    libtype = 'static'
+
+    def link_shared_object(self, objects, output_libname, output_dir=None,
+        libraries=None, library_dirs=None, runtime_library_dirs=None,
+        export_symbols=None, debug=0, extra_preargs=None,
+        extra_postargs=None, build_temp=None, target_lang=None
+    ):
+        # XXX we need to either disallow these attrs on Library instances,
+        #     or warn/abort here if set, or something...
+        #libraries=None, library_dirs=None, runtime_library_dirs=None,
+        #export_symbols=None, extra_preargs=None, extra_postargs=None,
+        #build_temp=None
+
+        assert output_dir is None   # distutils build_ext doesn't pass this
+        output_dir,filename = os.path.split(output_libname)
+        basename, ext = os.path.splitext(filename)
+        if self.library_filename("x").startswith('lib'):
+            # strip 'lib' prefix; this is kludgy if some platform uses
+            # a different prefix
+            basename = basename[3:]
+
+        self.create_static_lib(
+            objects, basename, output_dir, debug, target_lang
+        )
+
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/build_py.py b/vendor/distribute-0.6.35/setuptools/command/build_py.py
new file mode 100644
index 0000000000000000000000000000000000000000..8751acd493cf5c3c45022f29940ee2beda3bb6cd
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/build_py.py
@@ -0,0 +1,280 @@
+import os.path, sys, fnmatch
+from distutils.command.build_py import build_py as _build_py
+from distutils.util import convert_path
+from glob import glob
+
+try:
+    from distutils.util import Mixin2to3 as _Mixin2to3
+    # add support for converting doctests that is missing in 3.1 distutils
+    from distutils import log
+    from lib2to3.refactor import RefactoringTool, get_fixers_from_package
+    import setuptools
+    class DistutilsRefactoringTool(RefactoringTool):
+        def log_error(self, msg, *args, **kw):
+            log.error(msg, *args)
+
+        def log_message(self, msg, *args):
+            log.info(msg, *args)
+
+        def log_debug(self, msg, *args):
+            log.debug(msg, *args)
+
+    class Mixin2to3(_Mixin2to3):
+        def run_2to3(self, files, doctests = False):
+            # See if the distribution option has been set, otherwise check the
+            # setuptools default.
+            if self.distribution.use_2to3 is not True:
+                return
+            if not files:
+                return
+            log.info("Fixing "+" ".join(files))
+            self.__build_fixer_names()
+            self.__exclude_fixers()
+            if doctests:
+                if setuptools.run_2to3_on_doctests:
+                    r = DistutilsRefactoringTool(self.fixer_names)
+                    r.refactor(files, write=True, doctests_only=True)
+            else:
+                _Mixin2to3.run_2to3(self, files)
+
+        def __build_fixer_names(self):
+            if self.fixer_names: return
+            self.fixer_names = []
+            for p in setuptools.lib2to3_fixer_packages:
+                self.fixer_names.extend(get_fixers_from_package(p))
+            if self.distribution.use_2to3_fixers is not None:
+                for p in self.distribution.use_2to3_fixers:
+                    self.fixer_names.extend(get_fixers_from_package(p))
+
+        def __exclude_fixers(self):
+            excluded_fixers = getattr(self, 'exclude_fixers', [])
+            if self.distribution.use_2to3_exclude_fixers is not None:
+                excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
+            for fixer_name in excluded_fixers:
+                if fixer_name in self.fixer_names:
+                    self.fixer_names.remove(fixer_name)
+
+except ImportError:
+    class Mixin2to3:
+        def run_2to3(self, files, doctests=True):
+            # Nothing done in 2.x
+            pass
+
+class build_py(_build_py, Mixin2to3):
+    """Enhanced 'build_py' command that includes data files with packages
+
+    The data files are specified via a 'package_data' argument to 'setup()'.
+    See 'setuptools.dist.Distribution' for more details.
+
+    Also, this version of the 'build_py' command allows you to specify both
+    'py_modules' and 'packages' in the same setup operation.
+    """
+    def finalize_options(self):
+        _build_py.finalize_options(self)
+        self.package_data = self.distribution.package_data
+        self.exclude_package_data = self.distribution.exclude_package_data or {}
+        if 'data_files' in self.__dict__: del self.__dict__['data_files']
+        self.__updated_files = []
+        self.__doctests_2to3 = []
+
+    def run(self):
+        """Build modules, packages, and copy data files to build directory"""
+        if not self.py_modules and not self.packages:
+            return
+
+        if self.py_modules:
+            self.build_modules()
+
+        if self.packages:
+            self.build_packages()
+            self.build_package_data()
+
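+        # Convert the copied modules, then the doctests inside those modules,
+        # and finally any data files listed in convert_2to3_doctests.  Each
+        # call is a no-op unless 2to3 conversion is enabled.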
+        self.run_2to3(self.__updated_files, False)
+        self.run_2to3(self.__updated_files, True)
+        self.run_2to3(self.__doctests_2to3, True)
+
+        # Only compile actual .py files, using our base class' idea of what our
+        # output files are.
+        self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
+
+    def __getattr__(self,attr):
+        if attr=='data_files':  # lazily compute data files
+            self.data_files = files = self._get_data_files(); return files
+        return _build_py.__getattr__(self,attr)
+
+    def build_module(self, module, module_file, package):
+        outfile, copied = _build_py.build_module(self, module, module_file, package)
+        if copied:
+            self.__updated_files.append(outfile)
+        return outfile, copied
+
+    def _get_data_files(self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+        self.analyze_manifest()
+        data = []
+        for package in self.packages or ():
+            # Locate package source directory
+            src_dir = self.get_package_dir(package)
+
+            # Compute package build directory
+            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+            # Length of path to strip from found files
+            plen = len(src_dir)+1
+
+            # Strip directory from globbed filenames
+            filenames = [
+                file[plen:] for file in self.find_data_files(package, src_dir)
+                ]
+            data.append( (package, src_dir, build_dir, filenames) )
+        return data
+
+    def find_data_files(self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'"""
+        globs = (self.package_data.get('', [])
+                 + self.package_data.get(package, []))
+        files = self.manifest_files.get(package, [])[:]
+        for pattern in globs:
+            # Each pattern has to be converted to a platform-specific path
+            files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
+        return self.exclude_data_files(package, src_dir, files)
+
+    def build_package_data(self):
+        """Copy data files into build directory"""
+        lastdir = None
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                srcfile = os.path.join(src_dir, filename)
+                outf, copied = self.copy_file(srcfile, target)
+                srcfile = os.path.abspath(srcfile)
+                if copied and srcfile in self.distribution.convert_2to3_doctests:
+                    self.__doctests_2to3.append(outf)
+
+
+    def analyze_manifest(self):
+        self.manifest_files = mf = {}
+        if not self.distribution.include_package_data:
+            return
+        src_dirs = {}
+        for package in self.packages or ():
+            # Locate package source directory
+            src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+        self.run_command('egg_info')
+        ei_cmd = self.get_finalized_command('egg_info')
+        for path in ei_cmd.filelist.files:
+            d,f = os.path.split(assert_relative(path))
+            prev = None
+            oldf = f
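+            # walk up the directory tree until we hit a known package
+            # source directory (or run out of path components)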
+            while d and d!=prev and d not in src_dirs:
+                prev = d
+                d, df = os.path.split(d)
+                f = os.path.join(df, f)
+            if d in src_dirs:
+                if path.endswith('.py') and f==oldf:
+                    continue    # it's a module, not data
+                mf.setdefault(src_dirs[d],[]).append(path)
+
+    def get_data_files(self): pass  # kludge 2.4 for lazy computation
+
+    if sys.version<"2.4":    # Python 2.4 and later already have this code
+        def get_outputs(self, include_bytecode=1):
+            """Return complete list of files copied to the build directory
+
+            This includes both '.py' files and data files, as well as '.pyc'
+            and '.pyo' files if 'include_bytecode' is true.  (This method is
+            needed for the 'install_lib' command to do its job properly, and to
+            generate a correct installation manifest.)
+            """
+            return _build_py.get_outputs(self, include_bytecode) + [
+                os.path.join(build_dir, filename)
+                for package, src_dir, build_dir,filenames in self.data_files
+                for filename in filenames
+                ]
+
+    def check_package(self, package, package_dir):
+        """Check namespace packages' __init__ for declare_namespace"""
+        try:
+            return self.packages_checked[package]
+        except KeyError:
+            pass
+
+        init_py = _build_py.check_package(self, package, package_dir)
+        self.packages_checked[package] = init_py
+
+        if not init_py or not self.distribution.namespace_packages:
+            return init_py
+
+        for pkg in self.distribution.namespace_packages:
+            if pkg==package or pkg.startswith(package+'.'):
+                break
+        else:
+            return init_py
+
+        f = open(init_py,'rbU')
+        if 'declare_namespace'.encode() not in f.read():
+            from distutils import log
+            log.warn(
+               "WARNING: %s is a namespace package, but its __init__.py does\n"
+               "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
+               '(See the setuptools manual under "Namespace Packages" for '
+               "details.)\n", package
+            )
+        f.close()
+        return init_py
+
+    def initialize_options(self):
+        self.packages_checked={}
+        _build_py.initialize_options(self)
+
+
+    def get_package_dir(self, package):
+        res = _build_py.get_package_dir(self, package)
+        if self.distribution.src_root is not None:
+            return os.path.join(self.distribution.src_root, res)
+        return res
+
+
+    def exclude_data_files(self, package, src_dir, files):
+        """Filter filenames for package's data files in 'src_dir'"""
+        globs = (self.exclude_package_data.get('', [])
+                 + self.exclude_package_data.get(package, []))
+        bad = []
+        for pattern in globs:
+            bad.extend(
+                fnmatch.filter(
+                    files, os.path.join(src_dir, convert_path(pattern))
+                )
+            )
+        bad = dict.fromkeys(bad)
+        seen = {}
+        return [
+            f for f in files if f not in bad
+                and f not in seen and seen.setdefault(f,1)  # ditch dupes
+        ]
+
+
+def assert_relative(path):
+    if not os.path.isabs(path):
+        return path
+    from distutils.errors import DistutilsSetupError
+    raise DistutilsSetupError(
+"""Error: setup script specifies an absolute path:
+
+    %s
+
+setup() arguments must *always* be /-separated paths relative to the
+setup.py directory, *never* absolute paths.
+""" % path
+    )
+
+
+
+
+
+
+
+
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/develop.py b/vendor/distribute-0.6.35/setuptools/command/develop.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d500040d0da40ea372da58a3af19b11137e8598
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/develop.py
@@ -0,0 +1,167 @@
+from setuptools.command.easy_install import easy_install
+from distutils.util import convert_path, subst_vars
+from pkg_resources import Distribution, PathMetadata, normalize_path
+from distutils import log
+from distutils.errors import DistutilsError, DistutilsOptionError
+import os, sys, setuptools, glob
+
+class develop(easy_install):
+    """Set up package for development"""
+
+    description = "install package in 'development mode'"
+
+    user_options = easy_install.user_options + [
+        ("uninstall", "u", "Uninstall this source package"),
+        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+    ]
+
+    boolean_options = easy_install.boolean_options + ['uninstall']
+
+    command_consumes_arguments = False  # override base
+
+    def run(self):
+        if self.uninstall:
+            self.multi_version = True
+            self.uninstall_link()
+        else:
+            self.install_for_development()
+        self.warn_deprecated_options()
+
+    def initialize_options(self):
+        self.uninstall = None
+        self.egg_path = None
+        easy_install.initialize_options(self)
+        self.setup_path = None
+        self.always_copy_from = '.'   # always copy eggs installed in curdir
+
+
+
+    def finalize_options(self):
+        ei = self.get_finalized_command("egg_info")
+        if ei.broken_egg_info:
+            raise DistutilsError(
+            "Please rename %r to %r before using 'develop'"
+            % (ei.egg_info, ei.broken_egg_info)
+            )
+        self.args = [ei.egg_name]
+
+
+
+
+        easy_install.finalize_options(self)
+        self.expand_basedirs()
+        self.expand_dirs()
+        # pick up setup-dir .egg files only: no .egg-info
+        self.package_index.scan(glob.glob('*.egg'))
+
+        self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
+        self.egg_base = ei.egg_base
+        if self.egg_path is None:
+            self.egg_path = os.path.abspath(ei.egg_base)
+
+        target = normalize_path(self.egg_base)
+        if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
+            raise DistutilsOptionError(
+                "--egg-path must be a relative path from the install"
+                " directory to "+target
+        )
+
+        # Make a distribution for the package's source
+        self.dist = Distribution(
+            target,
+            PathMetadata(target, os.path.abspath(ei.egg_info)),
+            project_name = ei.egg_name
+        )
+
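+        # relative path from the egg base back up to the setup directory
+        # (one '../' per path component of egg_base)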
+        p = self.egg_base.replace(os.sep,'/')
+        if p!= os.curdir:
+            p = '../' * (p.count('/')+1)
+        self.setup_path = p
+        p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
+        if  p != normalize_path(os.curdir):
+            raise DistutilsOptionError(
+                "Can't get a consistent path to setup script from"
+                " installation directory", p, normalize_path(os.curdir))
+
+    def install_for_development(self):
+        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
+            # If we run 2to3 we cannot do this in place:
+
+            # Ensure metadata is up-to-date
+            self.reinitialize_command('build_py', inplace=0)
+            self.run_command('build_py')
+            bpy_cmd = self.get_finalized_command("build_py")
+            build_path = normalize_path(bpy_cmd.build_lib)
+
+            # Build extensions
+            self.reinitialize_command('egg_info', egg_base=build_path)
+            self.run_command('egg_info')
+
+            self.reinitialize_command('build_ext', inplace=0)
+            self.run_command('build_ext')
+            
+            # Fixup egg-link and easy-install.pth
+            ei_cmd = self.get_finalized_command("egg_info")
+            self.egg_path = build_path
+            self.dist.location = build_path
+            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)    # XXX
+        else:
+            # Without 2to3 inplace works fine:
+            self.run_command('egg_info')
+
+            # Build extensions in-place
+            self.reinitialize_command('build_ext', inplace=1)
+            self.run_command('build_ext')
+        
+        self.install_site_py()  # ensure that target dir is site-safe
+        if setuptools.bootstrap_install_from:
+            self.easy_install(setuptools.bootstrap_install_from)
+            setuptools.bootstrap_install_from = None
+
+        # create an .egg-link in the installation dir, pointing to our egg
+        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+        if not self.dry_run:
+            f = open(self.egg_link,"w")
+            f.write(self.egg_path + "\n" + self.setup_path)
+            f.close()
+        # postprocess the installed distro, fixing up .pth, installing scripts,
+        # and handling requirements
+        self.process_distribution(None, self.dist, not self.no_deps)
+
+
+    def uninstall_link(self):
+        if os.path.exists(self.egg_link):
+            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+            egg_link_file = open(self.egg_link)
+            contents = [line.rstrip() for line in egg_link_file]
+            egg_link_file.close()
+            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
+                log.warn("Link points to %s: uninstall aborted", contents)
+                return
+            if not self.dry_run:
+                os.unlink(self.egg_link)
+        if not self.dry_run:
+            self.update_pth(self.dist)  # remove any .pth link to us
+        if self.distribution.scripts:
+            # XXX should also check for entry point scripts!
+            log.warn("Note: you must uninstall or replace scripts manually!")
+
+    def install_egg_scripts(self, dist):
+        if dist is not self.dist:
+            # Installing a dependency, so fall back to normal behavior
+            return easy_install.install_egg_scripts(self,dist)
+
+        # create wrapper scripts in the script dir, pointing to dist.scripts
+
+        # new-style...
+        self.install_wrapper_scripts(dist)
+
+        # ...and old-style
+        for script_name in self.distribution.scripts or []:
+            script_path = os.path.abspath(convert_path(script_name))
+            script_name = os.path.basename(script_path)
+            f = open(script_path,'rU')
+            script_text = f.read()
+            f.close()
+            self.install_script(dist, script_name, script_text, script_path)
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/easy_install.py b/vendor/distribute-0.6.35/setuptools/command/easy_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d72f75843c999f14664c1fed34458f530638ca9
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/easy_install.py
@@ -0,0 +1,1947 @@
+#!python
+"""\
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages.  For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ http://packages.python.org/distribute/easy_install.html
+
+"""
+import sys
+import os
+import zipimport
+import shutil
+import tempfile
+import zipfile
+import re
+import stat
+import random
+from glob import glob
+from setuptools import Command, _dont_write_bytecode
+from setuptools.sandbox import run_setup
+from distutils import log, dir_util
+from distutils.util import get_platform
+from distutils.util import convert_path, subst_vars
+from distutils.sysconfig import get_python_lib, get_config_vars
+from distutils.errors import DistutilsArgError, DistutilsOptionError, \
+    DistutilsError, DistutilsPlatformError
+from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
+from setuptools.command import setopt
+from setuptools.archive_util import unpack_archive
+from setuptools.package_index import PackageIndex
+from setuptools.package_index import URL_SCHEME
+from setuptools.command import bdist_egg, egg_info
+from pkg_resources import yield_lines, normalize_path, resource_string, \
+        ensure_directory, get_distribution, find_distributions, \
+        Environment, Requirement, Distribution, \
+        PathMetadata, EggMetadata, WorkingSet, \
+         DistributionNotFound, VersionConflict, \
+        DEVELOP_DIST
+
+sys_executable = os.path.normpath(sys.executable)
+
+__all__ = [
+    'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+    'main', 'get_exe_prefixes',
+]
+
+import site
+HAS_USER_SITE = not sys.version < "2.6" and site.ENABLE_USER_SITE
+
+import struct
+def is_64bit():
+    return struct.calcsize("P") == 8
+
+def samefile(p1,p2):
+    if hasattr(os.path,'samefile') and (
+        os.path.exists(p1) and os.path.exists(p2)
+    ):
+        return os.path.samefile(p1,p2)
+    return (
+        os.path.normpath(os.path.normcase(p1)) ==
+        os.path.normpath(os.path.normcase(p2))
+    )
+
+if sys.version_info <= (3,):
+    def _to_ascii(s):
+        return s
+    def isascii(s):
+        try:
+            unicode(s, 'ascii')
+            return True
+        except UnicodeError:
+            return False
+else:
+    def _to_ascii(s):
+        return s.encode('ascii')
+    def isascii(s):
+        try:
+            s.encode('ascii')
+            return True
+        except UnicodeError:
+            return False
+
+class easy_install(Command):
+    """Manage a download/build/install process"""
+    description = "Find/get/install Python packages"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('prefix=', None, "installation prefix"),
+        ("zip-ok", "z", "install package as a zipfile"),
+        ("multi-version", "m", "make apps have to require() a version"),
+        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+        ("install-dir=", "d", "install package to DIR"),
+        ("script-dir=", "s", "install scripts to DIR"),
+        ("exclude-scripts", "x", "Don't install scripts"),
+        ("always-copy", "a", "Copy all needed packages to install dir"),
+        ("index-url=", "i", "base URL of Python Package Index"),
+        ("find-links=", "f", "additional URL(s) to search for packages"),
+        ("delete-conflicting", "D", "no longer needed; don't use this"),
+        ("ignore-conflicts-at-my-risk", None,
+            "no longer needed; don't use this"),
+        ("build-directory=", "b",
+            "download/extract/build in DIR; keep the results"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('record=', None,
+         "filename in which to record list of installed files"),
+        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+        ('site-dirs=','S',"list of directories where .pth files work"),
+        ('editable', 'e', "Install specified packages in editable form"),
+        ('no-deps', 'N', "don't install dependencies"),
+        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+        ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
+        ('version', None, "print version information and exit"),
+        ('no-find-links', None,
+         "Don't load find-links defined in packages being installed")
+    ]
+    boolean_options = [
+        'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
+        'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable',
+        'no-deps', 'local-snapshots-ok', 'version'
+    ]
+
+    if HAS_USER_SITE:
+        user_options.append(('user', None,
+                             "install in user site-package '%s'" % site.USER_SITE))
+        boolean_options.append('user')
+
+
+    negative_opt = {'always-unzip': 'zip-ok'}
+    create_index = PackageIndex
+
+    def initialize_options(self):
+        if HAS_USER_SITE:
+            whereami = os.path.abspath(__file__)
+            self.user = whereami.startswith(site.USER_SITE)
+        else:
+            self.user = 0
+
+        self.zip_ok = self.local_snapshots_ok = None
+        self.install_dir = self.script_dir = self.exclude_scripts = None
+        self.index_url = None
+        self.find_links = None
+        self.build_directory = None
+        self.args = None
+        self.optimize = self.record = None
+        self.upgrade = self.always_copy = self.multi_version = None
+        self.editable = self.no_deps = self.allow_hosts = None
+        self.root = self.prefix = self.no_report = None
+        self.version = None
+        self.install_purelib = None     # for pure module distributions
+        self.install_platlib = None     # non-pure (dists w/ extensions)
+        self.install_headers = None     # for C/C++ headers
+        self.install_lib = None         # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        self.install_base = None
+        self.install_platbase = None
+        if HAS_USER_SITE:
+            self.install_userbase = site.USER_BASE
+            self.install_usersite = site.USER_SITE
+        else:
+            self.install_userbase = None
+            self.install_usersite = None
+        self.no_find_links = None
+
+        # Options not specifiable via command line
+        self.package_index = None
+        self.pth_file = self.always_copy_from = None
+        self.delete_conflicting = None
+        self.ignore_conflicts_at_my_risk = None
+        self.site_dirs = None
+        self.installed_projects = {}
+        self.sitepy_installed = False
+        # Always read easy_install options, even if we are subclassed, or have
+        # an independent instance created.  This ensures that defaults will
+        # always come from the standard configuration file(s)' "easy_install"
+        # section, even if this is a "develop" or "install" command, or some
+        # other embedding.
+        self._dry_run = None
+        self.verbose = self.distribution.verbose
+        self.distribution._set_command_options(
+            self, self.distribution.get_option_dict('easy_install')
+        )
+
+    def delete_blockers(self, blockers):
+        for filename in blockers:
+            if os.path.exists(filename) or os.path.islink(filename):
+                log.info("Deleting %s", filename)
+                if not self.dry_run:
+                    if os.path.isdir(filename) and not os.path.islink(filename):
+                        rmtree(filename)
+                    else:
+                        os.unlink(filename)
+
+    def finalize_options(self):
+        if self.version:
+            print 'distribute %s' % get_distribution('distribute').version
+            sys.exit()
+
+        py_version = sys.version.split()[0]
+        prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
+
+        self.config_vars = {'dist_name': self.distribution.get_name(),
+                            'dist_version': self.distribution.get_version(),
+                            'dist_fullname': self.distribution.get_fullname(),
+                            'py_version': py_version,
+                            'py_version_short': py_version[0:3],
+                            'py_version_nodot': py_version[0] + py_version[2],
+                            'sys_prefix': prefix,
+                            'prefix': prefix,
+                            'sys_exec_prefix': exec_prefix,
+                            'exec_prefix': exec_prefix,
+                            # Only python 3.2+ has abiflags
+                            'abiflags': getattr(sys, 'abiflags', ''),
+                           }
+
+        if HAS_USER_SITE:
+            self.config_vars['userbase'] = self.install_userbase
+            self.config_vars['usersite'] = self.install_usersite
+
+        # fix the install_dir if "--user" was used
+        #XXX: duplicate of the code in the setup command
+        if self.user and HAS_USER_SITE:
+            self.create_home_path()
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            if os.name == 'posix':
+                self.select_scheme("unix_user")
+            else:
+                self.select_scheme(os.name + "_user")
+
+        self.expand_basedirs()
+        self.expand_dirs()
+
+        self._expand('install_dir','script_dir','build_directory','site_dirs')
+        # If a non-default installation directory was specified, default the
+        # script directory to match it.
+        if self.script_dir is None:
+            self.script_dir = self.install_dir
+
+        if self.no_find_links is None:
+            self.no_find_links = False
+
+        # Let install_dir get set by install_lib command, which in turn
+        # gets its info from the install command, and takes into account
+        # --prefix and --home and all that other crud.
+        self.set_undefined_options('install_lib',
+            ('install_dir','install_dir')
+        )
+        # Likewise, set default script_dir from 'install_scripts.install_dir'
+        self.set_undefined_options('install_scripts',
+            ('install_dir', 'script_dir')
+        )
+
+        if self.user and self.install_purelib:
+            self.install_dir = self.install_purelib
+            self.script_dir = self.install_scripts
+        # default --record from the install command
+        self.set_undefined_options('install', ('record', 'record'))
+        normpath = map(normalize_path, sys.path)
+        self.all_site_dirs = get_site_dirs()
+        if self.site_dirs is not None:
+            site_dirs = [
+                os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
+            ]
+            for d in site_dirs:
+                if not os.path.isdir(d):
+                    log.warn("%s (in --site-dirs) does not exist", d)
+                elif normalize_path(d) not in normpath:
+                    raise DistutilsOptionError(
+                        d+" (in --site-dirs) is not on sys.path"
+                    )
+                else:
+                    self.all_site_dirs.append(normalize_path(d))
+        if not self.editable: self.check_site_dir()
+        self.index_url = self.index_url or "http://pypi.python.org/simple"
+        self.shadow_path = self.all_site_dirs[:]
+        for path_item in self.install_dir, normalize_path(self.script_dir):
+            if path_item not in self.shadow_path:
+                self.shadow_path.insert(0, path_item)
+
+        if self.allow_hosts is not None:
+            hosts = [s.strip() for s in self.allow_hosts.split(',')]
+        else:
+            hosts = ['*']
+        if self.package_index is None:
+            self.package_index = self.create_index(
+                self.index_url, search_path = self.shadow_path, hosts=hosts,
+            )
+        self.local_index = Environment(self.shadow_path+sys.path)
+
+        if self.find_links is not None:
+            if isinstance(self.find_links, basestring):
+                self.find_links = self.find_links.split()
+        else:
+            self.find_links = []
+        if self.local_snapshots_ok:
+            self.package_index.scan_egg_links(self.shadow_path+sys.path)
+        if not self.no_find_links:
+            self.package_index.add_find_links(self.find_links)
+        self.set_undefined_options('install_lib', ('optimize','optimize'))
+        if not isinstance(self.optimize,int):
+            try:
+                self.optimize = int(self.optimize)
+                if not (0 <= self.optimize <= 2): raise ValueError
+            except ValueError:
+                raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+        if self.delete_conflicting and self.ignore_conflicts_at_my_risk:
+            raise DistutilsOptionError(
+                "Can't use both --delete-conflicting and "
+                "--ignore-conflicts-at-my-risk at the same time"
+            )
+        if self.editable and not self.build_directory:
+            raise DistutilsArgError(
+                "Must specify a build directory (-b) when using --editable"
+            )
+        if not self.args:
+            raise DistutilsArgError(
+                "No urls, filenames, or requirements specified (see --help)")
+
+        self.outputs = []
+
+
+    def _expand_attrs(self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+    def expand_basedirs(self):
+        """Calls `os.path.expanduser` on install_base, install_platbase and
+        root."""
+        self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+    def expand_dirs(self):
+        """Calls `os.path.expanduser` on install dirs."""
+        self._expand_attrs(['install_purelib', 'install_platlib',
+                            'install_lib', 'install_headers',
+                            'install_scripts', 'install_data',])
+
+    def run(self):
+        if self.verbose != self.distribution.verbose:
+            log.set_verbosity(self.verbose)
+        try:
+            for spec in self.args:
+                self.easy_install(spec, not self.no_deps)
+            if self.record:
+                outputs = self.outputs
+                if self.root:               # strip any package prefix
+                    root_len = len(self.root)
+                    for counter in xrange(len(outputs)):
+                        outputs[counter] = outputs[counter][root_len:]
+                from distutils import file_util
+                self.execute(
+                    file_util.write_file, (self.record, outputs),
+                    "writing list of installed files to '%s'" %
+                    self.record
+                )
+            self.warn_deprecated_options()
+        finally:
+            log.set_verbosity(self.distribution.verbose)
+
+    def pseudo_tempname(self):
+        """Return a pseudo-tempname base in the install directory.
+        This code is intentionally naive; if a malicious party can write to
+        the target directory you're already in deep doodoo.
+        """
+        try:
+            pid = os.getpid()
+        except:
+            pid = random.randint(0,sys.maxint)
+        return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+
+    def warn_deprecated_options(self):
+        if self.delete_conflicting or self.ignore_conflicts_at_my_risk:
+            log.warn(
+                "Note: The -D, --delete-conflicting and"
+                " --ignore-conflicts-at-my-risk options no longer have any"
+                " purpose and should not be used."
+            )
+
+    def check_site_dir(self):
+        """Verify that self.install_dir is a .pth-capable dir, if needed"""
+
+        instdir = normalize_path(self.install_dir)
+        pth_file = os.path.join(instdir,'easy-install.pth')
+
+        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
+        is_site_dir = instdir in self.all_site_dirs
+
+        if not is_site_dir:
+            # No?  Then directly test whether it does .pth file processing
+            is_site_dir = self.check_pth_processing()
+        else:
+            # make sure we can write to target dir
+            testfile = self.pseudo_tempname()+'.write-test'
+            test_exists = os.path.exists(testfile)
+            try:
+                if test_exists: os.unlink(testfile)
+                open(testfile,'w').close()
+                os.unlink(testfile)
+            except (OSError,IOError):
+                self.cant_write_to_target()
+
+        if not is_site_dir and not self.multi_version:
+            # Can't install non-multi to non-site dir
+            raise DistutilsError(self.no_default_version_msg())
+
+        if is_site_dir:
+            if self.pth_file is None:
+                self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
+        else:
+            self.pth_file = None
+
+        PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
+        if instdir not in map(normalize_path, filter(None,PYTHONPATH)):
+            # only PYTHONPATH dirs need a site.py, so pretend it's there
+            self.sitepy_installed = True
+        elif self.multi_version and not os.path.exists(pth_file):
+            self.sitepy_installed = True    # don't need site.py in this case
+            self.pth_file = None            # and don't create a .pth file
+        self.install_dir = instdir
+
+    def cant_write_to_target(self):
+        msg = """can't create or remove files in install directory
+
+The following error occurred while trying to add or remove files in the
+installation directory:
+
+    %s
+
+The installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+    %s
+"""     % (sys.exc_info()[1], self.install_dir,)
+
+        if not os.path.exists(self.install_dir):
+            msg += """
+This directory does not currently exist.  Please create it and try again, or
+choose a different installation directory (using the -d or --install-dir
+option).
+"""
+        else:
+            msg += """
+Perhaps your account does not have write access to this directory?  If the
+installation directory is a system-owned directory, you may need to sign in
+as the administrator or "root" account.  If you do not have administrative
+access to this machine, you may wish to choose a different installation
+directory, preferably one that is listed in your PYTHONPATH environment
+variable.
+
+For information on other options, you may wish to consult the
+documentation at:
+
+  http://packages.python.org/distribute/easy_install.html
+
+Please make the appropriate changes for your system and try again.
+"""
+        raise DistutilsError(msg)
+
+
+
+
+    def check_pth_processing(self):
+        """Empirically verify whether .pth files are supported in inst. dir"""
+        instdir = self.install_dir
+        log.info("Checking .pth file support in %s", instdir)
+        pth_file = self.pseudo_tempname()+".pth"
+        ok_file = pth_file+'.ok'
+        ok_exists = os.path.exists(ok_file)
+        try:
+            if ok_exists: os.unlink(ok_file)
+            dirname = os.path.dirname(ok_file)
+            if not os.path.exists(dirname):
+                os.makedirs(dirname)
+            f = open(pth_file,'w')
+        except (OSError,IOError):
+            self.cant_write_to_target()
+        else:
+            try:
+                f.write("import os; f = open(%r, 'w'); f.write('OK'); f.close()\n" % (ok_file,))
+                f.close(); f=None
+                executable = sys.executable
+                if os.name=='nt':
+                    dirname,basename = os.path.split(executable)
+                    alt = os.path.join(dirname,'pythonw.exe')
+                    if basename.lower()=='python.exe' and os.path.exists(alt):
+                        # use pythonw.exe to avoid opening a console window
+                        executable = alt
+
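+                # spawn a fresh interpreter; if this directory supports .pth
+                # processing, the .pth file written above will create ok_file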
+                from distutils.spawn import spawn
+                spawn([executable,'-E','-c','pass'],0)
+
+                if os.path.exists(ok_file):
+                    log.info(
+                        "TEST PASSED: %s appears to support .pth files",
+                        instdir
+                    )
+                    return True
+            finally:
+                if f: f.close()
+                if os.path.exists(ok_file): os.unlink(ok_file)
+                if os.path.exists(pth_file): os.unlink(pth_file)
+        if not self.multi_version:
+            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
+        return False
+
+    def install_egg_scripts(self, dist):
+        """Write all the scripts for `dist`, unless scripts are excluded"""
+        if not self.exclude_scripts and dist.metadata_isdir('scripts'):
+            for script_name in dist.metadata_listdir('scripts'):
+                self.install_script(
+                    dist, script_name,
+                    dist.get_metadata('scripts/'+script_name)
+                )
+        self.install_wrapper_scripts(dist)
+
+    def add_output(self, path):
+        if os.path.isdir(path):
+            for base, dirs, files in os.walk(path):
+                for filename in files:
+                    self.outputs.append(os.path.join(base,filename))
+        else:
+            self.outputs.append(path)
+
+    def not_editable(self, spec):
+        if self.editable:
+            raise DistutilsArgError(
+                "Invalid argument %r: you can't use filenames or URLs "
+                "with --editable (except via the --find-links option)."
+                % (spec,)
+            )
+
+    def check_editable(self,spec):
+        if not self.editable:
+            return
+
+        if os.path.exists(os.path.join(self.build_directory, spec.key)):
+            raise DistutilsArgError(
+                "%r already exists in %s; can't do a checkout there" %
+                (spec.key, self.build_directory)
+            )
+
+
+
+
+
+
+    def easy_install(self, spec, deps=False):
+        tmpdir = tempfile.mkdtemp(prefix="easy_install-")
+        download = None
+        if not self.editable: self.install_site_py()
+
+        try:
+            if not isinstance(spec,Requirement):
+                if URL_SCHEME(spec):
+                    # It's a url, download it to tmpdir and process
+                    self.not_editable(spec)
+                    download = self.package_index.download(spec, tmpdir)
+                    return self.install_item(None, download, tmpdir, deps, True)
+
+                elif os.path.exists(spec):
+                    # Existing file or directory, just process it directly
+                    self.not_editable(spec)
+                    return self.install_item(None, spec, tmpdir, deps, True)
+                else:
+                    spec = parse_requirement_arg(spec)
+
+            self.check_editable(spec)
+            dist = self.package_index.fetch_distribution(
+                spec, tmpdir, self.upgrade, self.editable, not self.always_copy,
+                self.local_index
+            )
+
+            if dist is None:
+                msg = "Could not find suitable distribution for %r" % spec
+                if self.always_copy:
+                    msg+=" (--always-copy skips system and development eggs)"
+                raise DistutilsError(msg)
+            elif dist.precedence==DEVELOP_DIST:
+                # .egg-info dists don't need installing, just process deps
+                self.process_distribution(spec, dist, deps, "Using")
+                return dist
+            else:
+                return self.install_item(spec, dist.location, tmpdir, deps)
+
+        finally:
+            if os.path.exists(tmpdir):
+                rmtree(tmpdir)
+
+    def install_item(self, spec, download, tmpdir, deps, install_needed=False):
+
+        # Installation is also needed if the file is in tmpdir or is not an egg
+        install_needed = install_needed or self.always_copy
+        install_needed = install_needed or os.path.dirname(download) == tmpdir
+        install_needed = install_needed or not download.endswith('.egg')
+        install_needed = install_needed or (
+            self.always_copy_from is not None and
+            os.path.dirname(normalize_path(download)) ==
+            normalize_path(self.always_copy_from)
+        )
+
+        if spec and not install_needed:
+            # at this point, we know it's a local .egg, we just don't know if
+            # it's already installed.
+            for dist in self.local_index[spec.project_name]:
+                if dist.location==download:
+                    break
+            else:
+                install_needed = True   # it's not in the local index
+
+        log.info("Processing %s", os.path.basename(download))
+
+        if install_needed:
+            dists = self.install_eggs(spec, download, tmpdir)
+            for dist in dists:
+                self.process_distribution(spec, dist, deps)
+        else:
+            dists = [self.check_conflicts(self.egg_distribution(download))]
+            self.process_distribution(spec, dists[0], deps, "Using")
+
+        if spec is not None:
+            for dist in dists:
+                if dist in spec:
+                    return dist
+
+
+
+    def select_scheme(self, name):
+        """Sets the install directories by applying the install schemes."""
+        # it's the caller's problem if they supply a bad name!
+        scheme = INSTALL_SCHEMES[name]
+        for key in SCHEME_KEYS:
+            attrname = 'install_' + key
+            if getattr(self, attrname) is None:
+                setattr(self, attrname, scheme[key])
+
+
+
+
+    def process_distribution(self, requirement, dist, deps=True, *info):
+        self.update_pth(dist)
+        self.package_index.add(dist)
+        self.local_index.add(dist)
+        if not self.editable:
+            self.install_egg_scripts(dist)
+        self.installed_projects[dist.key] = dist
+        log.info(self.installation_report(requirement, dist, *info))
+        if (dist.has_metadata('dependency_links.txt') and
+            not self.no_find_links):
+            self.package_index.add_find_links(
+                dist.get_metadata_lines('dependency_links.txt')
+            )
+        if not deps and not self.always_copy:
+            return
+        elif requirement is not None and dist.key != requirement.key:
+            log.warn("Skipping dependencies for %s", dist)
+            return  # XXX this is not the distribution we were looking for
+        elif requirement is None or dist not in requirement:
+            # if we wound up with a different version, resolve what we've got
+            distreq = dist.as_requirement()
+            requirement = requirement or distreq
+            requirement = Requirement(
+                distreq.project_name, distreq.specs, requirement.extras
+            )
+        log.info("Processing dependencies for %s", requirement)
+        try:
+            distros = WorkingSet([]).resolve(
+                [requirement], self.local_index, self.easy_install
+            )
+        except DistributionNotFound, e:
+            raise DistutilsError(
+                "Could not find required distribution %s" % e.args
+            )
+        except VersionConflict, e:
+            raise DistutilsError(
+                "Installed distribution %s conflicts with requirement %s"
+                % e.args
+            )
+        if self.always_copy or self.always_copy_from:
+            # Force all the relevant distros to be copied or activated
+            for dist in distros:
+                if dist.key not in self.installed_projects:
+                    self.easy_install(dist.as_requirement())
+        log.info("Finished processing dependencies for %s", requirement)
+
+    def should_unzip(self, dist):
+        if self.zip_ok is not None:
+            return not self.zip_ok
+        if dist.has_metadata('not-zip-safe'):
+            return True
+        if not dist.has_metadata('zip-safe'):
+            return True
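+        # default to unpacking the egg even when it is flagged zip-safe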
+        return True
+
+    def maybe_move(self, spec, dist_filename, setup_base):
+        dst = os.path.join(self.build_directory, spec.key)
+        if os.path.exists(dst):
+            log.warn(
+               "%r already exists in %s; build directory %s will not be kept",
+               spec.key, self.build_directory, setup_base
+            )
+            return setup_base
+        if os.path.isdir(dist_filename):
+            setup_base = dist_filename
+        else:
+            if os.path.dirname(dist_filename)==setup_base:
+                os.unlink(dist_filename)   # get it out of the tmp dir
+            contents = os.listdir(setup_base)
+            if len(contents)==1:
+                dist_filename = os.path.join(setup_base,contents[0])
+                if os.path.isdir(dist_filename):
+                    # if the only thing there is a directory, move it instead
+                    setup_base = dist_filename
+        ensure_directory(dst); shutil.move(setup_base, dst)
+        return dst
+
+    def install_wrapper_scripts(self, dist):
+        if not self.exclude_scripts:
+            for args in get_script_args(dist):
+                self.write_script(*args)
+
+
+
+    def install_script(self, dist, script_name, script_text, dev_path=None):
+        """Generate a legacy script wrapper and install it"""
+        spec = str(dist.as_requirement())
+        is_script = is_python_script(script_text, script_name)
+
+        def get_template(filename):
+            """
+            There are a couple of template scripts in the package. This
+            function loads one of them and prepares it for use.
+
+            These templates use triple-quotes to escape variable
+            substitutions so the scripts get the 2to3 treatment when built
+            on Python 3. The templates cannot use triple-quotes naturally.
+            """
+            raw_bytes = resource_string('setuptools', template_name)
+            template_str = raw_bytes.decode('utf-8')
+            clean_template = template_str.replace('"""', '')
+            return clean_template
+
+        if is_script:
+            template_name = 'script template.py'
+            if dev_path:
+                template_name = template_name.replace('.py', ' (dev).py')
+            script_text = (get_script_header(script_text) +
+                get_template(template_name) % locals())
+        self.write_script(script_name, _to_ascii(script_text), 'b')
+
+    def write_script(self, script_name, contents, mode="t", blockers=()):
+        """Write an executable file to the scripts directory"""
+        self.delete_blockers(   # clean up old .py/.pyw w/o a script
+            [os.path.join(self.script_dir,x) for x in blockers])
+        log.info("Installing %s script to %s", script_name, self.script_dir)
+        target = os.path.join(self.script_dir, script_name)
+        self.add_output(target)
+
+        mask = current_umask()
+        if not self.dry_run:
+            ensure_directory(target)
+            f = open(target,"w"+mode)
+            f.write(contents)
+            f.close()
+            chmod(target, 0777-mask)
+
+
+
+
+    def install_eggs(self, spec, dist_filename, tmpdir):
+        # .egg dirs or files are already built, so just return them
+        if dist_filename.lower().endswith('.egg'):
+            return [self.install_egg(dist_filename, tmpdir)]
+        elif dist_filename.lower().endswith('.exe'):
+            return [self.install_exe(dist_filename, tmpdir)]
+
+        # Anything else, try to extract and build
+        setup_base = tmpdir
+        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
+            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
+        elif os.path.isdir(dist_filename):
+            setup_base = os.path.abspath(dist_filename)
+
+        if (setup_base.startswith(tmpdir)   # something we downloaded
+            and self.build_directory and spec is not None
+        ):
+            setup_base = self.maybe_move(spec, dist_filename, setup_base)
+
+        # Find the setup.py file
+        setup_script = os.path.join(setup_base, 'setup.py')
+
+        if not os.path.exists(setup_script):
+            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
+            if not setups:
+                raise DistutilsError(
+                    "Couldn't find a setup script in %s" % os.path.abspath(dist_filename)
+                )
+            if len(setups)>1:
+                raise DistutilsError(
+                    "Multiple setup scripts in %s" % os.path.abspath(dist_filename)
+                )
+            setup_script = setups[0]
+
+        # Now run it, and return the result
+        if self.editable:
+            log.info(self.report_editable(spec, setup_script))
+            return []
+        else:
+            return self.build_and_install(setup_script, setup_base)
+
+    def egg_distribution(self, egg_path):
+        if os.path.isdir(egg_path):
+            metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
+        else:
+            metadata = EggMetadata(zipimport.zipimporter(egg_path))
+        return Distribution.from_filename(egg_path,metadata=metadata)
+
+    def install_egg(self, egg_path, tmpdir):
+        destination = os.path.join(self.install_dir,os.path.basename(egg_path))
+        destination = os.path.abspath(destination)
+        if not self.dry_run:
+            ensure_directory(destination)
+
+        dist = self.egg_distribution(egg_path)
+        self.check_conflicts(dist)
+        if not samefile(egg_path, destination):
+            if os.path.isdir(destination) and not os.path.islink(destination):
+                dir_util.remove_tree(destination, dry_run=self.dry_run)
+            elif os.path.exists(destination):
+                self.execute(os.unlink,(destination,),"Removing "+destination)
+            uncache_zipdir(destination)
+            if os.path.isdir(egg_path):
+                if egg_path.startswith(tmpdir):
+                    f,m = shutil.move, "Moving"
+                else:
+                    f,m = shutil.copytree, "Copying"
+            elif self.should_unzip(dist):
+                self.mkpath(destination)
+                f,m = self.unpack_and_compile, "Extracting"
+            elif egg_path.startswith(tmpdir):
+                f,m = shutil.move, "Moving"
+            else:
+                f,m = shutil.copy2, "Copying"
+
+            self.execute(f, (egg_path, destination),
+                (m+" %s to %s") %
+                (os.path.basename(egg_path),os.path.dirname(destination)))
+
+        self.add_output(destination)
+        return self.egg_distribution(destination)
+
+    def install_exe(self, dist_filename, tmpdir):
+        # See if it's valid, get data
+        cfg = extract_wininst_cfg(dist_filename)
+        if cfg is None:
+            raise DistutilsError(
+                "%s is not a valid distutils Windows .exe" % dist_filename
+            )
+        # Create a dummy distribution object until we build the real distro
+        dist = Distribution(None,
+            project_name=cfg.get('metadata','name'),
+            version=cfg.get('metadata','version'), platform=get_platform()
+        )
+
+        # Convert the .exe to an unpacked egg
+        egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
+        egg_tmp  = egg_path+'.tmp'
+        egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+        pkg_inf = os.path.join(egg_info, 'PKG-INFO')
+        ensure_directory(pkg_inf)   # make sure EGG-INFO dir exists
+        dist._provider = PathMetadata(egg_tmp, egg_info)    # XXX
+        self.exe_to_egg(dist_filename, egg_tmp)
+
+        # Write EGG-INFO/PKG-INFO
+        if not os.path.exists(pkg_inf):
+            f = open(pkg_inf,'w')
+            f.write('Metadata-Version: 1.0\n')
+            for k,v in cfg.items('metadata'):
+                if k<>'target_version':
+                    f.write('%s: %s\n' % (k.replace('_','-').title(), v))
+            f.close()
+        script_dir = os.path.join(egg_info,'scripts')
+        self.delete_blockers(   # delete entry-point scripts to avoid duping
+            [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
+        )
+        # Build .egg file from tmpdir
+        bdist_egg.make_zipfile(
+            egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
+        )
+        # install the .egg
+        return self.install_egg(egg_path, tmpdir)
+
+    def exe_to_egg(self, dist_filename, egg_tmp):
+        """Extract a bdist_wininst to the directories an egg would use"""
+        # Check for .pth file and set up prefix translations
+        prefixes = get_exe_prefixes(dist_filename)
+        to_compile = []
+        native_libs = []
+        top_level = {}
+        def process(src,dst):
+            s = src.lower()
+            for old,new in prefixes:
+                if s.startswith(old):
+                    src = new+src[len(old):]
+                    parts = src.split('/')
+                    dst = os.path.join(egg_tmp, *parts)
+                    dl = dst.lower()
+                    if dl.endswith('.pyd') or dl.endswith('.dll'):
+                        parts[-1] = bdist_egg.strip_module(parts[-1])
+                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        native_libs.append(src)
+                    elif dl.endswith('.py') and old!='SCRIPTS/':
+                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        to_compile.append(dst)
+                    return dst
+            if not src.endswith('.pth'):
+                log.warn("WARNING: can't process %s", src)
+            return None
+        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
+        unpack_archive(dist_filename, egg_tmp, process)
+        stubs = []
+        for res in native_libs:
+            if res.lower().endswith('.pyd'):    # create stubs for .pyd's
+                parts = res.split('/')
+                resource = parts[-1]
+                parts[-1] = bdist_egg.strip_module(parts[-1])+'.py'
+                pyfile = os.path.join(egg_tmp, *parts)
+                to_compile.append(pyfile); stubs.append(pyfile)
+                bdist_egg.write_stub(resource, pyfile)
+        self.byte_compile(to_compile)   # compile .py's
+        bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
+            bdist_egg.analyze_egg(egg_tmp, stubs))  # write zip-safety flag
+
+        for name in 'top_level','native_libs':
+            if locals()[name]:
+                txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
+                if not os.path.exists(txt):
+                    f = open(txt,'w')
+                    f.write('\n'.join(locals()[name])+'\n')
+                    f.close()
+
+    def check_conflicts(self, dist):
+        """Verify that there are no conflicting "old-style" packages"""
+
+        return dist     # XXX temporarily disable until new strategy is stable
+        from imp import find_module, get_suffixes
+        from glob import glob
+
+        blockers = []
+        names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr
+
+        exts = {'.pyc':1, '.pyo':1}     # get_suffixes() might leave one out
+        for ext,mode,typ in get_suffixes():
+            exts[ext] = 1
+
+        for path,files in expand_paths([self.install_dir]+self.all_site_dirs):
+            for filename in files:
+                base,ext = os.path.splitext(filename)
+                if base in names:
+                    if not ext:
+                        # no extension, check for package
+                        try:
+                            f, filename, descr = find_module(base, [path])
+                        except ImportError:
+                            continue
+                        else:
+                            if f: f.close()
+                            if filename not in blockers:
+                                blockers.append(filename)
+                    elif ext in exts and base!='site':  # XXX ugh
+                        blockers.append(os.path.join(path,filename))
+        if blockers:
+            self.found_conflicts(dist, blockers)
+
+        return dist
+
+    def found_conflicts(self, dist, blockers):
+        if self.delete_conflicting:
+            log.warn("Attempting to delete conflicting packages:")
+            return self.delete_blockers(blockers)
+
+        msg = """\
+-------------------------------------------------------------------------
+CONFLICT WARNING:
+
+The following modules or packages have the same names as modules or
+packages being installed, and will be *before* the installed packages in
+Python's search path.  You MUST remove all of the relevant files and
+directories before you will be able to use the package(s) you are
+installing:
+
+   %s
+
+""" % '\n   '.join(blockers)
+
+        if self.ignore_conflicts_at_my_risk:
+            msg += """\
+(Note: you can run EasyInstall on '%s' with the
+--delete-conflicting option to attempt deletion of the above files
+and/or directories.)
+""" % dist.project_name
+        else:
+            msg += """\
+Note: you can attempt this installation again with EasyInstall, and use
+either the --delete-conflicting (-D) option or the
+--ignore-conflicts-at-my-risk option, to either delete the above files
+and directories, or to ignore the conflicts, respectively.  Note that if
+you ignore the conflicts, the installed package(s) may not work.
+"""
+        msg += """\
+-------------------------------------------------------------------------
+"""
+        sys.stderr.write(msg)
+        sys.stderr.flush()
+        if not self.ignore_conflicts_at_my_risk:
+            raise DistutilsError("Installation aborted due to conflicts")
+
+    def installation_report(self, req, dist, what="Installed"):
+        """Helpful installation message for display to package users"""
+        msg = "\n%(what)s %(eggloc)s%(extras)s"
+        if self.multi_version and not self.no_report:
+            msg += """
+
+Because this distribution was installed --multi-version, before you can
+import modules from this package in an application, you will need to
+'import pkg_resources' and then use a 'require()' call similar to one of
+these examples, in order to select the desired version:
+
+    pkg_resources.require("%(name)s")  # latest installed version
+    pkg_resources.require("%(name)s==%(version)s")  # this exact version
+    pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
+"""
+            if self.install_dir not in map(normalize_path,sys.path):
+                msg += """
+
+Note also that the installation directory must be on sys.path at runtime for
+this to work.  (e.g. by being the application's script directory, by being on
+PYTHONPATH, or by being added to sys.path by your code.)
+"""
+        eggloc = dist.location
+        name = dist.project_name
+        version = dist.version
+        extras = '' # TODO: self.report_extras(req, dist)
+        return msg % locals()
+
+    def report_editable(self, spec, setup_script):
+        dirname = os.path.dirname(setup_script)
+        python = sys.executable
+        return """\nExtracted editable version of %(spec)s to %(dirname)s
+
+If it uses setuptools in its setup script, you can activate it in
+"development" mode by going to that directory and running::
+
+    %(python)s setup.py develop
+
+See the setuptools documentation for the "develop" command for more info.
+""" % locals()
+
+    def run_setup(self, setup_script, setup_base, args):
+        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
+        sys.modules.setdefault('distutils.command.egg_info', egg_info)
+
+        args = list(args)
+        if self.verbose>2:
+            v = 'v' * (self.verbose - 1)
+            args.insert(0,'-'+v)
+        elif self.verbose<2:
+            args.insert(0,'-q')
+        if self.dry_run:
+            args.insert(0,'-n')
+        log.info(
+            "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
+        )
+        try:
+            run_setup(setup_script, args)
+        except SystemExit, v:
+            raise DistutilsError("Setup script exited with %s" % (v.args[0],))
+
+    def build_and_install(self, setup_script, setup_base):
+        args = ['bdist_egg', '--dist-dir']
+
+        dist_dir = tempfile.mkdtemp(
+            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
+        )
+        try:
+            self._set_fetcher_options(os.path.dirname(setup_script))
+            args.append(dist_dir)
+
+            self.run_setup(setup_script, setup_base, args)
+            all_eggs = Environment([dist_dir])
+            eggs = []
+            for key in all_eggs:
+                for dist in all_eggs[key]:
+                    eggs.append(self.install_egg(dist.location, setup_base))
+            if not eggs and not self.dry_run:
+                log.warn("No eggs found in %s (setup script problem?)",
+                    dist_dir)
+            return eggs
+        finally:
+            rmtree(dist_dir)
+            log.set_verbosity(self.verbose) # restore our log verbosity
+
+    def _set_fetcher_options(self, base):
+        """
+        When easy_install is about to run bdist_egg on a source dist, that
+        source dist might have 'setup_requires' directives, requiring
+        additional fetching. Ensure the fetcher options given to easy_install
+        are available to that command as well.
+        """
+        # find the fetch options from easy_install and write them out
+        #  to the setup.cfg file.
+        ei_opts = self.distribution.get_option_dict('easy_install').copy()
+        fetch_directives = (
+            'find_links', 'site_dirs', 'index_url', 'optimize',
+            'allow_hosts',
+        )
+        fetch_options = {}
+        for key, val in ei_opts.iteritems():
+            if key not in fetch_directives: continue
+            fetch_options[key.replace('_', '-')] = val[1]
+        # create a settings dictionary suitable for `edit_config`
+        settings = dict(easy_install=fetch_options)
+        cfg_filename = os.path.join(base, 'setup.cfg')
+        setopt.edit_config(cfg_filename, settings)
+
+
+    def update_pth(self,dist):
+        if self.pth_file is None:
+            return
+
+        for d in self.pth_file[dist.key]:    # drop old entries
+            if self.multi_version or d.location != dist.location:
+                log.info("Removing %s from easy-install.pth file", d)
+                self.pth_file.remove(d)
+                if d.location in self.shadow_path:
+                    self.shadow_path.remove(d.location)
+
+        if not self.multi_version:
+            if dist.location in self.pth_file.paths:
+                log.info(
+                    "%s is already the active version in easy-install.pth",
+                    dist
+                )
+            else:
+                log.info("Adding %s to easy-install.pth file", dist)
+                self.pth_file.add(dist) # add new entry
+                if dist.location not in self.shadow_path:
+                    self.shadow_path.append(dist.location)
+
+        if not self.dry_run:
+
+            self.pth_file.save()
+            if dist.key=='distribute':
+                # Ensure that setuptools itself never becomes unavailable!
+                # XXX should this check for latest version?
+                filename = os.path.join(self.install_dir,'setuptools.pth')
+                if os.path.islink(filename): os.unlink(filename)
+                f = open(filename, 'wt')
+                f.write(self.pth_file.make_relative(dist.location)+'\n')
+                f.close()
+
+    def unpack_progress(self, src, dst):
+        # Progress filter for unpacking
+        log.debug("Unpacking %s to %s", src, dst)
+        return dst     # only unpack-and-compile skips files for dry run
+
+    def unpack_and_compile(self, egg_path, destination):
+        to_compile = []; to_chmod = []
+
+        def pf(src,dst):
+            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+                to_compile.append(dst)
+                to_chmod.append(dst)
+            elif dst.endswith('.dll') or dst.endswith('.so'):
+                to_chmod.append(dst)
+            self.unpack_progress(src,dst)
+            return not self.dry_run and dst or None
+
+        unpack_archive(egg_path, destination, pf)
+        self.byte_compile(to_compile)
+        if not self.dry_run:
+            for f in to_chmod:
+                mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755
+                chmod(f, mode)
+
+    def byte_compile(self, to_compile):
+        if _dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from distutils.util import byte_compile
+        try:
+            # try to make the byte compile messages quieter
+            log.set_verbosity(self.verbose - 1)
+
+            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+            if self.optimize:
+                byte_compile(
+                    to_compile, optimize=self.optimize, force=1,
+                    dry_run=self.dry_run
+                )
+        finally:
+            log.set_verbosity(self.verbose)     # restore original verbosity
+
+
+
+
+
+
+
+
+    def no_default_version_msg(self):
+        return """bad install directory or PYTHONPATH
+
+You are attempting to install a package to a directory that is not
+on PYTHONPATH and which Python does not read ".pth" files from.  The
+installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+    %s
+
+and your PYTHONPATH environment variable currently contains:
+
+    %r
+
+Here are some of your options for correcting the problem:
+
+* You can choose a different installation directory, i.e., one that is
+  on PYTHONPATH or supports .pth files
+
+* You can add the installation directory to the PYTHONPATH environment
+  variable.  (It must then also be on PYTHONPATH whenever you run
+  Python and want to use the package(s) you are installing.)
+
+* You can set up the installation directory to support ".pth" files by
+  using one of the approaches described here:
+
+  http://packages.python.org/distribute/easy_install.html#custom-installation-locations
+
+Please make the appropriate changes for your system and try again.""" % (
+        self.install_dir, os.environ.get('PYTHONPATH','')
+    )
+
+
+
+
+
+
+
+
+
+
+    def install_site_py(self):
+        """Make sure there's a site.py in the target dir, if needed"""
+
+        if self.sitepy_installed:
+            return  # already did it, or don't need to
+
+        sitepy = os.path.join(self.install_dir, "site.py")
+        source = resource_string(Requirement.parse("distribute"), "site.py")
+        current = ""
+
+        if os.path.exists(sitepy):
+            log.debug("Checking existing site.py in %s", self.install_dir)
+            f = open(sitepy,'rb')
+            current = f.read()
+            # we want str, not bytes
+            if sys.version_info >= (3,):
+                current = current.decode()
+
+            f.close()
+            if not current.startswith('def __boot():'):
+                raise DistutilsError(
+                    "%s is not a setuptools-generated site.py; please"
+                    " remove it." % sitepy
+                )
+
+        if current != source:
+            log.info("Creating %s", sitepy)
+            if not self.dry_run:
+                ensure_directory(sitepy)
+                f = open(sitepy,'wb')
+                f.write(source)
+                f.close()
+            self.byte_compile([sitepy])
+
+        self.sitepy_installed = True
+
+
+
+
+    def create_home_path(self):
+        """Create directories under ~."""
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for name, path in self.config_vars.iteritems():
+            if path.startswith(home) and not os.path.isdir(path):
+                self.debug_print("os.makedirs('%s', 0700)" % path)
+                os.makedirs(path, 0700)
+
+
+
+
+
+
+
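+    # --prefix expansion schemes: the 'posix' entry is used on POSIX systems,
+    # DEFAULT_SCHEME (a Windows-style layout) everywhere else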
+    INSTALL_SCHEMES = dict(
+        posix = dict(
+            install_dir = '$base/lib/python$py_version_short/site-packages',
+            script_dir  = '$base/bin',
+        ),
+    )
+
+    DEFAULT_SCHEME = dict(
+        install_dir = '$base/Lib/site-packages',
+        script_dir  = '$base/Scripts',
+    )
+
+    def _expand(self, *attrs):
+        config_vars = self.get_finalized_command('install').config_vars
+
+        if self.prefix:
+            # Set default install_dir/scripts from --prefix
+            config_vars = config_vars.copy()
+            config_vars['base'] = self.prefix
+            scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
+            for attr,val in scheme.items():
+                if getattr(self,attr,None) is None:
+                    setattr(self,attr,val)
+
+        from distutils.util import subst_vars
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                val = subst_vars(val, config_vars)
+                if os.name == 'posix':
+                    val = os.path.expanduser(val)
+                setattr(self, attr, val)
+
+
+
+
+
+
+
+
+
+def get_site_dirs():
+    # return a list of 'site' dirs
+    sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep))
+    prefixes = [sys.prefix]
+    if sys.exec_prefix != sys.prefix:
+        prefixes.append(sys.exec_prefix)
+    for prefix in prefixes:
+        if prefix:
+            if sys.platform in ('os2emx', 'riscos'):
+                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+            elif os.sep == '/':
+                sitedirs.extend([os.path.join(prefix,
+                                         "lib",
+                                         "python" + sys.version[:3],
+                                         "site-packages"),
+                            os.path.join(prefix, "lib", "site-python")])
+            else:
+                sitedirs.extend(
+                    [prefix, os.path.join(prefix, "lib", "site-packages")]
+                )
+            if sys.platform == 'darwin':
+                # for framework builds *only* we add the standard Apple
+                # locations. Currently only per-user, but /Library and
+                # /Network/Library could be added too
+                if 'Python.framework' in prefix:
+                    home = os.environ.get('HOME')
+                    if home:
+                        sitedirs.append(
+                            os.path.join(home,
+                                         'Library',
+                                         'Python',
+                                         sys.version[:3],
+                                         'site-packages'))
+    for plat_specific in (0,1):
+        site_lib = get_python_lib(plat_specific)
+        if site_lib not in sitedirs: sitedirs.append(site_lib)
+
+    if HAS_USER_SITE:
+        sitedirs.append(site.USER_SITE)
+
+    sitedirs = map(normalize_path, sitedirs)
+
+    return sitedirs
+
+
+def expand_paths(inputs):
+    """Yield sys.path directories that might contain "old-style" packages"""
+
+    seen = {}
+
+    for dirname in inputs:
+        dirname = normalize_path(dirname)
+        if dirname in seen:
+            continue
+
+        seen[dirname] = 1
+        if not os.path.isdir(dirname):
+            continue
+
+        files = os.listdir(dirname)
+        yield dirname, files
+
+        for name in files:
+            if not name.endswith('.pth'):
+                # We only care about the .pth files
+                continue
+            if name in ('easy-install.pth','setuptools.pth'):
+                # Ignore .pth files that we control
+                continue
+
+            # Read the .pth file
+            f = open(os.path.join(dirname,name))
+            lines = list(yield_lines(f))
+            f.close()
+
+            # Yield existing non-dupe, non-import directory lines from it
+            for line in lines:
+                if not line.startswith("import"):
+                    line = normalize_path(line.rstrip())
+                    if line not in seen:
+                        seen[line] = 1
+                        if not os.path.isdir(line):
+                            continue
+                        yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+    """Extract configuration data from a bdist_wininst .exe
+
+    Returns a ConfigParser.RawConfigParser, or None
+    """
+    f = open(dist_filename,'rb')
+    try:
+        endrec = zipfile._EndRecData(f)
+        if endrec is None:
+            return None
+
+        prepended = (endrec[9] - endrec[5]) - endrec[6]
+        if prepended < 12:  # no wininst data here
+            return None
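+        # the last 12 bytes of the prepended data form a (tag, cfglen, bmlen)
+        # footer; the null-terminated config block sits just before it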
+        f.seek(prepended-12)
+
+        import struct, StringIO, ConfigParser
+        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
+        if tag not in (0x1234567A, 0x1234567B):
+            return None     # not a valid tag
+
+        f.seek(prepended-(12+cfglen))
+        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
+        try:
+            part = f.read(cfglen)
+            # part is in bytes, but we need to read up to the first null
+            #  byte.
+            if sys.version_info >= (3,):
+                null_byte = bytes([0])
+            else:
+                null_byte = chr(0)
+            config = part.split(null_byte, 1)[0]
+            # Now the config is in bytes, but on Python 3, it must be
+            #  unicode for the RawConfigParser, so decode it. Is this the
+            #  right encoding?
+            config = config.decode('ascii')
+            cfg.readfp(StringIO.StringIO(config))
+        except ConfigParser.Error:
+            return None
+        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+            return None
+        return cfg
+
+    finally:
+        f.close()
+
+
+
+
+
+
+
+
+def get_exe_prefixes(exe_filename):
+    """Get exe->egg path translations for a given .exe file"""
+
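+    # Each pair maps a path prefix inside the installer's embedded zip to the
+    # corresponding location in the egg layout ('' meaning the egg root).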
+    prefixes = [
+        ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
+        ('PLATLIB/', ''),
+        ('SCRIPTS/', 'EGG-INFO/scripts/'),
+        ('DATA/LIB/site-packages', ''),
+    ]
+    z = zipfile.ZipFile(exe_filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+            parts = name.split('/')
+            if len(parts)==3 and parts[2]=='PKG-INFO':
+                if parts[1].endswith('.egg-info'):
+                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
+                    break
+            if len(parts)!=2 or not name.endswith('.pth'):
+                continue
+            if name.endswith('-nspkg.pth'):
+                continue
+            if parts[0].upper() in ('PURELIB','PLATLIB'):
+                contents = z.read(name)
+                if sys.version_info >= (3,):
+                    contents = contents.decode()
+                for pth in yield_lines(contents):
+                    pth = pth.strip().replace('\\','/')
+                    if not pth.startswith('import'):
+                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
+    finally:
+        z.close()
+    prefixes = [(x.lower(),y) for x, y in prefixes]
+    prefixes.sort(); prefixes.reverse()
+    return prefixes
+
+
+def parse_requirement_arg(spec):
+    try:
+        return Requirement.parse(spec)
+    except ValueError:
+        raise DistutilsError(
+            "Not a URL, existing file, or requirement spec: %r" % (spec,)
+        )
+
+class PthDistributions(Environment):
+    """A .pth file with Distribution paths in it"""
+
+    dirty = False
+
+    def __init__(self, filename, sitedirs=()):
+        self.filename = filename; self.sitedirs=map(normalize_path, sitedirs)
+        self.basedir = normalize_path(os.path.dirname(self.filename))
+        self._load(); Environment.__init__(self, [], None, None)
+        for path in yield_lines(self.paths):
+            map(self.add, find_distributions(path, True))
+
+    def _load(self):
+        self.paths = []
+        saw_import = False
+        seen = dict.fromkeys(self.sitedirs)
+        if os.path.isfile(self.filename):
+            f = open(self.filename,'rt')
+            for line in f:
+                if line.startswith('import'):
+                    saw_import = True
+                    continue
+                path = line.rstrip()
+                self.paths.append(path)
+                if not path.strip() or path.strip().startswith('#'):
+                    continue
+                # skip non-existent paths, in case somebody deleted a package
+                # manually, and duplicate paths as well
+                path = self.paths[-1] = normalize_path(
+                    os.path.join(self.basedir,path)
+                )
+                if not os.path.exists(path) or path in seen:
+                    self.paths.pop()    # skip it
+                    self.dirty = True   # we cleaned up, so we're dirty now :)
+                    continue
+                seen[path] = 1
+            f.close()
+
+        if self.paths and not saw_import:
+            self.dirty = True   # ensure anything we touch has import wrappers
+        while self.paths and not self.paths[-1].strip():
+            self.paths.pop()
+
+    def save(self):
+        """Write changed .pth file back to disk"""
+        if not self.dirty:
+            return
+
+        data = '\n'.join(map(self.make_relative,self.paths))
+        if data:
+            log.debug("Saving %s", self.filename)
+            data = (
+                "import sys; sys.__plen = len(sys.path)\n"
+                "%s\n"
+                "import sys; new=sys.path[sys.__plen:];"
+                " del sys.path[sys.__plen:];"
+                " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
+                " sys.__egginsert = p+len(new)\n"
+            ) % data
+
+            if os.path.islink(self.filename):
+                os.unlink(self.filename)
+            f = open(self.filename,'wt')
+            f.write(data); f.close()
+
+        elif os.path.exists(self.filename):
+            log.debug("Deleting empty %s", self.filename)
+            os.unlink(self.filename)
+
+        self.dirty = False
+
+    def add(self,dist):
+        """Add `dist` to the distribution map"""
+        if (dist.location not in self.paths and (
+                dist.location not in self.sitedirs or
+                dist.location == os.getcwd() #account for '.' being in PYTHONPATH
+                )):
+            self.paths.append(dist.location)
+            self.dirty = True
+        Environment.add(self,dist)
+
+    def remove(self,dist):
+        """Remove `dist` from the distribution map"""
+        while dist.location in self.paths:
+            self.paths.remove(dist.location); self.dirty = True
+        Environment.remove(self,dist)
+
+
+    def make_relative(self,path):
+        npath, last = os.path.split(normalize_path(path))
+        baselen = len(self.basedir)
+        parts = [last]
+        sep = os.altsep=='/' and '/' or os.sep
+        while len(npath)>=baselen:
+            if npath==self.basedir:
+                parts.append(os.curdir)
+                parts.reverse()
+                return sep.join(parts)
+            npath, last = os.path.split(npath)
+            parts.append(last)
+        else:
+            return path
+
+def get_script_header(script_text, executable=sys_executable, wininst=False):
+    """Create a #! line, getting options (if any) from script_text"""
+    from distutils.command.build_scripts import first_line_re
+
+    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
+    if not isinstance(first_line_re.pattern, str):
+        first_line_re = re.compile(first_line_re.pattern.decode())
+
+    first = (script_text+'\n').splitlines()[0]
+    match = first_line_re.match(first)
+    options = ''
+    if match:
+        options = match.group(1) or ''
+        if options: options = ' '+options
+    if wininst:
+        executable = "python.exe"
+    else:
+        executable = nt_quote_arg(executable)
+    hdr = "#!%(executable)s%(options)s\n" % locals()
+    if not isascii(hdr):
+        # Non-ascii path to sys.executable, use -x to prevent warnings
+        if options:
+            if options.strip().startswith('-'):
+                options = ' -x'+options.strip()[1:]
+            # else: punt, we can't do it, let the warning happen anyway
+        else:
+            options = ' -x'
+    executable = fix_jython_executable(executable, options)
+    hdr = "#!%(executable)s%(options)s\n" % locals()
+    return hdr
+
+def auto_chmod(func, arg, exc):
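+    # rmtree() error handler: Windows cannot unlink read-only files, so make
+    # the file writable and retry; any other error is re-raised with context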
+    if func is os.remove and os.name=='nt':
+        chmod(arg, stat.S_IWRITE)
+        return func(arg)
+    exc = sys.exc_info()
+    raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg)))
+
+def uncache_zipdir(path):
+    """Ensure that the importer caches dont have stale info for `path`"""
+    from zipimport import _zip_directory_cache as zdc
+    _uncache(path, zdc)
+    _uncache(path, sys.path_importer_cache)
+
+def _uncache(path, cache):
+    if path in cache:
+        del cache[path]
+    else:
+        path = normalize_path(path)
+        for p in cache:
+            if normalize_path(p)==path:
+                del cache[p]
+                return
+
+def is_python(text, filename='<string>'):
+    "Is this string a valid Python script?"
+    try:
+        compile(text, filename, 'exec')
+    except (SyntaxError, TypeError):
+        return False
+    else:
+        return True
+
+def is_sh(executable):
+    """Determine if the specified executable is a .sh (contains a #! line)"""
+    try:
+        fp = open(executable)
+        magic = fp.read(2)
+        fp.close()
+    except (OSError,IOError): return executable
+    return magic == '#!'
+
+def nt_quote_arg(arg):
+    """Quote a command line argument according to Windows parsing rules"""
+
+    result = []
+    needquote = False
+    nb = 0
+
+    needquote = (" " in arg) or ("\t" in arg)
+    if needquote:
+        result.append('"')
+
+    for c in arg:
+        if c == '\\':
+            nb += 1
+        elif c == '"':
+            # double preceding backslashes, then add a \"
+            result.append('\\' * (nb*2) + '\\"')
+            nb = 0
+        else:
+            if nb:
+                result.append('\\' * nb)
+                nb = 0
+            result.append(c)
+
+    if nb:
+        result.append('\\' * nb)
+
+    if needquote:
+        result.append('\\' * nb)    # double the trailing backslashes
+        result.append('"')
+
+    return ''.join(result)
+
+
+
+
+
+
+
+
+
+def is_python_script(script_text, filename):
+    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
+    """
+    if filename.endswith('.py') or filename.endswith('.pyw'):
+        return True     # extension says it's Python
+    if is_python(script_text, filename):
+        return True     # it's syntactically valid Python
+    if script_text.startswith('#!'):
+        # It begins with a '#!' line, so check if 'python' is in it somewhere
+        return 'python' in script_text.splitlines()[0].lower()
+
+    return False    # Not any Python I can recognize
+
+try:
+    from os import chmod as _chmod
+except ImportError:
+    # Jython compatibility
+    def _chmod(*args): pass
+
+def chmod(path, mode):
+    log.debug("changing mode of %s to %o", path, mode)
+    try:
+        _chmod(path, mode)
+    except os.error, e:
+        log.debug("chmod failed: %s", e)
+
+def fix_jython_executable(executable, options):
+    if sys.platform.startswith('java') and is_sh(executable):
+        # The Jython workaround is not needed on Linux systems.
+        import java
+        if java.lang.System.getProperty("os.name") == "Linux":
+            return executable
+
+        # Workaround Jython's sys.executable being a .sh (an invalid
+        # shebang line interpreter)
+        if options:
+            # Can't apply the workaround, leave it broken
+            log.warn("WARNING: Unable to adapt shebang line for Jython,"
+                             " the following script is NOT executable\n"
+                     "         see http://bugs.jython.org/issue1112 for"
+                             " more information.")
+        else:
+            return '/usr/bin/env %s' % executable
+    return executable
+
+
+def get_script_args(dist, executable=sys_executable, wininst=False):
+    """Yield write_script() argument tuples for a distribution's entrypoints"""
+    spec = str(dist.as_requirement())
+    header = get_script_header("", executable, wininst)
+    for group in 'console_scripts', 'gui_scripts':
+        for name, ep in dist.get_entry_map(group).items():
+            script_text = (
+                "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n"
+                "__requires__ = %(spec)r\n"
+                "import sys\n"
+                "from pkg_resources import load_entry_point\n"
+                "\n"
+                "if __name__ == '__main__':"
+                "\n"
+                "    sys.exit(\n"
+                "        load_entry_point(%(spec)r, %(group)r, %(name)r)()\n"
+                "    )\n"
+            ) % locals()
+            if sys.platform=='win32' or wininst:
+                # On Windows/wininst, add a .py extension and an .exe launcher
+                if group=='gui_scripts':
+                    ext, launcher = '-script.pyw', 'gui.exe'
+                    old = ['.pyw']
+                    new_header = re.sub('(?i)python.exe','pythonw.exe',header)
+                else:
+                    ext, launcher = '-script.py', 'cli.exe'
+                    old = ['.py','.pyc','.pyo']
+                    new_header = re.sub('(?i)pythonw.exe','python.exe',header)
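+                # pick the launcher stub matching the interpreter's bitness,
+                # e.g. cli-32.exe vs. cli-64.exe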
+                if is_64bit():
+                    launcher = launcher.replace(".", "-64.")
+                else:
+                    launcher = launcher.replace(".", "-32.")
+                if os.path.exists(new_header[2:-1]) or sys.platform!='win32':
+                    hdr = new_header
+                else:
+                    hdr = header
+                yield (name+ext, hdr+script_text, 't', [name+x for x in old])
+                yield (
+                    name+'.exe', resource_string('setuptools', launcher),
+                    'b' # write in binary mode
+                )
+            else:
+                # On other platforms, we assume the right thing to do is to
+                # just write the stub with no extension.
+                yield (name, header+script_text)
+
+def rmtree(path, ignore_errors=False, onerror=auto_chmod):
+    """Recursively delete a directory tree.
+
+    This code is taken from the Python 2.4 version of 'shutil', because
+    the 2.3 version doesn't really work right.
+    """
+    if ignore_errors:
+        def onerror(*args):
+            pass
+    elif onerror is None:
+        def onerror(*args):
+            raise
+    names = []
+    try:
+        names = os.listdir(path)
+    except os.error, err:
+        onerror(os.listdir, path, sys.exc_info())
+    for name in names:
+        fullname = os.path.join(path, name)
+        try:
+            mode = os.lstat(fullname).st_mode
+        except os.error:
+            mode = 0
+        if stat.S_ISDIR(mode):
+            rmtree(fullname, ignore_errors, onerror)
+        else:
+            try:
+                os.remove(fullname)
+            except os.error, err:
+                onerror(os.remove, fullname, sys.exc_info())
+    try:
+        os.rmdir(path)
+    except os.error:
+        onerror(os.rmdir, path, sys.exc_info())
+
+def current_umask():
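+    # the umask can only be read by setting it, so set a throwaway value and
+    # immediately restore the original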
+    tmp = os.umask(022)
+    os.umask(tmp)
+    return tmp
+
+def bootstrap():
+    # This function is called when setuptools*.egg is run using /bin/sh
+    import setuptools; argv0 = os.path.dirname(setuptools.__path__[0])
+    sys.argv[0] = argv0; sys.argv.append(argv0); main()
+
+def main(argv=None, **kw):
+    from setuptools import setup
+    from setuptools.dist import Distribution
+    import distutils.core
+
+    USAGE = """\
+usage: %(script)s [options] requirement_or_url ...
+   or: %(script)s --help
+"""
+
+    def gen_usage (script_name):
+        script = os.path.basename(script_name)
+        return USAGE % vars()
+
+    def with_ei_usage(f):
+        old_gen_usage = distutils.core.gen_usage
+        try:
+            distutils.core.gen_usage = gen_usage
+            return f()
+        finally:
+            distutils.core.gen_usage = old_gen_usage
+
+    class DistributionWithoutHelpCommands(Distribution):
+        common_usage = ""
+
+        def _show_help(self,*args,**kw):
+            with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
+
+        def find_config_files(self):
+            files = Distribution.find_config_files(self)
+            if 'setup.cfg' in files:
+                files.remove('setup.cfg')
+            return files
+
+    if argv is None:
+        argv = sys.argv[1:]
+
+    with_ei_usage(lambda:
+        setup(
+            script_args = ['-q','easy_install', '-v']+argv,
+            script_name = sys.argv[0] or 'easy_install',
+            distclass=DistributionWithoutHelpCommands, **kw
+        )
+    )
+
+
+
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/egg_info.py b/vendor/distribute-0.6.35/setuptools/command/egg_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c2ea0cca340f5d5bf066993e7d305c357e7d2c8
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/egg_info.py
@@ -0,0 +1,486 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+# This module should be kept compatible with Python 2.3
+import os, re, sys
+from setuptools import Command
+from distutils.errors import *
+from distutils import log
+from setuptools.command.sdist import sdist
+from distutils.util import convert_path
+from distutils.filelist import FileList as _FileList
+from pkg_resources import parse_requirements, safe_name, parse_version, \
+    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
+from sdist import walk_revctrl
+
+class egg_info(Command):
+    description = "create a distribution's .egg-info directory"
+
+    user_options = [
+        ('egg-base=', 'e', "directory containing .egg-info directories"
+                           " (default: top of the source tree)"),
+        ('tag-svn-revision', 'r',
+            "Add subversion revision ID to version number"),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+        ('no-svn-revision', 'R',
+            "Don't add subversion revision ID [default]"),
+        ('no-date', 'D', "Don't include date stamp [default]"),
+    ]
+
+    boolean_options = ['tag-date', 'tag-svn-revision']
+    negative_opt = {'no-svn-revision': 'tag-svn-revision',
+                    'no-date': 'tag-date'}
+
+
+
+
+
+
+
+    def initialize_options(self):
+        self.egg_name = None
+        self.egg_version = None
+        self.egg_base = None
+        self.egg_info = None
+        self.tag_build = None
+        self.tag_svn_revision = 0
+        self.tag_date = 0
+        self.broken_egg_info = False
+        self.vtags = None
+
+    def save_version_info(self, filename):
+        from setopt import edit_config
+        edit_config(
+            filename,
+            {'egg_info':
+                {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()}
+            }
+        )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def finalize_options (self):
+        self.egg_name = safe_name(self.distribution.get_name())
+        self.vtags = self.tags()
+        self.egg_version = self.tagged_version()
+
+        try:
+            list(
+                parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
+            )
+        except ValueError:
+            raise DistutilsOptionError(
+                "Invalid distribution name or version syntax: %s-%s" %
+                (self.egg_name,self.egg_version)
+            )
+
+        if self.egg_base is None:
+            dirs = self.distribution.package_dir
+            self.egg_base = (dirs or {}).get('',os.curdir)
+
+        self.ensure_dirname('egg_base')
+        self.egg_info = to_filename(self.egg_name)+'.egg-info'
+        if self.egg_base != os.curdir:
+            self.egg_info = os.path.join(self.egg_base, self.egg_info)
+        if '-' in self.egg_name: self.check_broken_egg_info()
+
+        # Set package version for the benefit of dumber commands
+        # (e.g. sdist, bdist_wininst, etc.)
+        #
+        self.distribution.metadata.version = self.egg_version
+
+        # If we bootstrapped around the lack of a PKG-INFO, as might be the
+        # case in a fresh checkout, make sure that any special tags get added
+        # to the version info
+        #
+        pd = self.distribution._patched_dist
+        if pd is not None and pd.key==self.egg_name.lower():
+            pd._version = self.egg_version
+            pd._parsed_version = parse_version(self.egg_version)
+            self.distribution._patched_dist = None
+
+
+    def write_or_delete_file(self, what, filename, data, force=False):
+        """Write `data` to `filename` or delete if empty
+
+        If `data` is non-empty, this routine is the same as ``write_file()``.
+        If `data` is empty but not ``None``, this is the same as calling
+        ``delete_file(filename)``.  If `data` is ``None``, then this is a no-op
+        unless `filename` exists, in which case a warning is issued about the
+        orphaned file (if `force` is false), or deleted (if `force` is true).
+        """
+        if data:
+            self.write_file(what, filename, data)
+        elif os.path.exists(filename):
+            if data is None and not force:
+                log.warn(
+                    "%s not set in setup(), but %s exists", what, filename
+                )
+                return
+            else:
+                self.delete_file(filename)
+
+    def write_file(self, what, filename, data):
+        """Write `data` to `filename` (if not a dry run) after announcing it
+
+        `what` is used in a log message to identify what is being written
+        to the file.
+        """
+        log.info("writing %s to %s", what, filename)
+        if sys.version_info >= (3,):
+            data = data.encode("utf-8")
+        if not self.dry_run:
+            f = open(filename, 'wb')
+            f.write(data)
+            f.close()
+
+    def delete_file(self, filename):
+        """Delete `filename` (if not a dry run) after announcing it"""
+        log.info("deleting %s", filename)
+        if not self.dry_run:
+            os.unlink(filename)
+
+    def tagged_version(self):
+        version = self.distribution.get_version()
+        # egg_info may be called more than once for a distribution,
+        # in which case the version string already contains all tags.
+        if self.vtags and version.endswith(self.vtags):
+            return safe_version(version)
+        return safe_version(version + self.vtags)
+
+    def run(self):
+        self.mkpath(self.egg_info)
+        installer = self.distribution.fetch_build_egg
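+        # each 'egg_info.writers' entry point writes one metadata file
+        # (PKG-INFO, requires.txt, entry_points.txt, ...) into the egg-info
+        # directory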
+        for ep in iter_entry_points('egg_info.writers'):
+            writer = ep.load(installer=installer)
+            writer(self, ep.name, os.path.join(self.egg_info,ep.name))
+
+        # Get rid of native_libs.txt if it was put there by older bdist_egg
+        nl = os.path.join(self.egg_info, "native_libs.txt")
+        if os.path.exists(nl):
+            self.delete_file(nl)
+
+        self.find_sources()
+
+    def tags(self):
+        version = ''
+        if self.tag_build:
+            version+=self.tag_build
+        if self.tag_svn_revision and (
+            os.path.exists('.svn') or os.path.exists('PKG-INFO')
+        ):  version += '-r%s' % self.get_svn_revision()
+        if self.tag_date:
+            import time; version += time.strftime("-%Y%m%d")
+        return version
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def get_svn_revision(self):
+        revision = 0
+        urlre = re.compile('url="([^"]+)"')
+        revre = re.compile(r'committed-rev="(\d+)"')
+
+        for base,dirs,files in os.walk(os.curdir):
+            if '.svn' not in dirs:
+                dirs[:] = []
+                continue    # no sense walking uncontrolled subdirs
+            dirs.remove('.svn')
+            f = open(os.path.join(base,'.svn','entries'))
+            data = f.read()
+            f.close()
+
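+            # Subversion 1.4+ working copies use a plain-text entries file
+            # whose first line is the format number; older clients used XML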
+            if data.startswith('10') or data.startswith('9') or data.startswith('8'):
+                data = map(str.splitlines,data.split('\n\x0c\n'))
+                del data[0][0]  # get rid of the '8' or '9' or '10'
+                dirurl = data[0][3]
+                localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0])
+            elif data.startswith('<?xml'):
+                dirurl = urlre.search(data).group(1)    # get repository URL
+                localrev = max([int(m.group(1)) for m in revre.finditer(data)]+[0])
+            else:
+                log.warn("unrecognized .svn/entries format; skipping %s", base)
+                dirs[:] = []
+                continue
+            if base==os.curdir:
+                base_url = dirurl+'/'   # save the root url
+            elif not dirurl.startswith(base_url):
+                dirs[:] = []
+                continue    # not part of the same svn tree, skip it
+            revision = max(revision, localrev)
+
+        return str(revision or get_pkg_info_revision())
+
+
+
+
+
+
+
+    def find_sources(self):
+        """Generate SOURCES.txt manifest file"""
+        manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
+        mm = manifest_maker(self.distribution)
+        mm.manifest = manifest_filename
+        mm.run()
+        self.filelist = mm.filelist
+
+    def check_broken_egg_info(self):
+        bei = self.egg_name+'.egg-info'
+        if self.egg_base != os.curdir:
+            bei = os.path.join(self.egg_base, bei)
+        if os.path.exists(bei):
+            log.warn(
+                "-"*78+'\n'
+                "Note: Your current .egg-info directory has a '-' in its name;"
+                '\nthis will not work correctly with "setup.py develop".\n\n'
+                'Please rename %s to %s to correct this problem.\n'+'-'*78,
+                bei, self.egg_info
+            )
+            self.broken_egg_info = self.egg_info
+            self.egg_info = bei     # make it work for now
+
+class FileList(_FileList):
+    """File list that accepts only existing, platform-independent paths"""
+
+    def append(self, item):
+        if item.endswith('\r'):     # Fix older sdists built on Windows
+            item = item[:-1]
+        path = convert_path(item)
+
+        if sys.version_info >= (3,):
+            try:
+                if os.path.exists(path) or os.path.exists(path.encode('utf-8')):
+                    self.files.append(path)
+            except UnicodeEncodeError:
+                # Accept UTF-8 filenames even if LANG=C
+                if os.path.exists(path.encode('utf-8')):
+                    self.files.append(path)
+                else:
+                    log.warn("'%s' not %s encodable -- skipping", path,
+                        sys.getfilesystemencoding())
+        else:
+            if os.path.exists(path):
+                self.files.append(path)
+
+
+
+
+
+
+
+
+class manifest_maker(sdist):
+
+    template = "MANIFEST.in"
+
+    def initialize_options (self):
+        self.use_defaults = 1
+        self.prune = 1
+        self.manifest_only = 1
+        self.force_manifest = 1
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        self.filelist = FileList()
+        if not os.path.exists(self.manifest):
+            self.write_manifest()   # it must exist so it'll get in the list
+        self.filelist.findall()
+        self.add_defaults()
+        if os.path.exists(self.template):
+            self.read_template()
+        self.prune_file_list()
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def write_manifest (self):
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        # The manifest must be UTF-8 encodable. See #303.
+        if sys.version_info >= (3,):
+            files = []
+            for file in self.filelist.files:
+                try:
+                    file.encode("utf-8")
+                except UnicodeEncodeError:
+                    log.warn("'%s' not UTF-8 encodable -- skipping" % file)
+                else:
+                    files.append(file)
+            self.filelist.files = files
+
+        files = self.filelist.files
+        if os.sep!='/':
+            files = [f.replace(os.sep,'/') for f in files]
+        self.execute(write_file, (self.manifest, files),
+                     "writing manifest file '%s'" % self.manifest)
+
+    def warn(self, msg):    # suppress missing-file warnings from sdist
+        if not msg.startswith("standard file not found:"):
+            sdist.warn(self, msg)
+
+    def add_defaults(self):
+        sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        rcfiles = list(walk_revctrl())
+        if rcfiles:
+            self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+    def prune_file_list (self):
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
+
+
+def write_file (filename, contents):
+    """Create a file with the specified name and write 'contents' (a
+    sequence of strings without line terminators) to it.
+    """
+    contents = "\n".join(contents)
+    if sys.version_info >= (3,):
+        contents = contents.encode("utf-8")
+    f = open(filename, "wb")        # always write POSIX-style manifest
+    f.write(contents)
+    f.close()
+
+
+
+
+
+
+
+
+
+
+
+
+
+def write_pkg_info(cmd, basename, filename):
+    log.info("writing %s", filename)
+    if not cmd.dry_run:
+        metadata = cmd.distribution.metadata
+        metadata.version, oldver = cmd.egg_version, metadata.version
+        metadata.name, oldname   = cmd.egg_name, metadata.name
+        try:
+            # write unescaped data to PKG-INFO, so older pkg_resources
+            # can still parse it
+            metadata.write_pkg_info(cmd.egg_info)
+        finally:
+            metadata.name, metadata.version = oldname, oldver
+
+        safe = getattr(cmd.distribution,'zip_safe',None)
+        import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+def warn_depends_obsolete(cmd, basename, filename):
+    if os.path.exists(filename):
+        log.warn(
+            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+            "Use the install_requires/extras_require setup() args instead."
+        )
+
+
+def write_requirements(cmd, basename, filename):
+    dist = cmd.distribution
+    data = ['\n'.join(yield_lines(dist.install_requires or ()))]
+    for extra,reqs in (dist.extras_require or {}).items():
+        data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
+    cmd.write_or_delete_file("requirements", filename, ''.join(data))
+
+def write_toplevel_names(cmd, basename, filename):
+    pkgs = dict.fromkeys(
+        [k.split('.',1)[0]
+            for k in cmd.distribution.iter_distribution_names()
+        ]
+    )
+    cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
+
+
+
+def overwrite_arg(cmd, basename, filename):
+    write_arg(cmd, basename, filename, True)
+
+def write_arg(cmd, basename, filename, force=False):
+    argname = os.path.splitext(basename)[0]
+    value = getattr(cmd.distribution, argname, None)
+    if value is not None:
+        value = '\n'.join(value)+'\n'
+    cmd.write_or_delete_file(argname, filename, value, force)
+
+def write_entries(cmd, basename, filename):
+    ep = cmd.distribution.entry_points
+
+    if isinstance(ep,basestring) or ep is None:
+        data = ep
+    elif ep is not None:
+        data = []
+        for section, contents in ep.items():
+            if not isinstance(contents,basestring):
+                contents = EntryPoint.parse_group(section, contents)
+                contents = '\n'.join(map(str,contents.values()))
+            data.append('[%s]\n%s\n\n' % (section,contents))
+        data = ''.join(data)
+
+    cmd.write_or_delete_file('entry points', filename, data, True)
+
+def get_pkg_info_revision():
+    # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
+    # a subversion revision
+    #
+    if os.path.exists('PKG-INFO'):
+        f = open('PKG-INFO','rU')
+        for line in f:
+            match = re.match(r"Version:.*-r(\d+)\s*$", line)
+            if match:
+                return int(match.group(1))
+        f.close()
+    return 0
+
+
+
+#
diff --git a/vendor/distribute-0.6.35/setuptools/command/install.py b/vendor/distribute-0.6.35/setuptools/command/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..247c4f259c976db16d0a7b0e55bd69a75704c62d
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/install.py
@@ -0,0 +1,124 @@
+import setuptools, sys, glob
+from distutils.command.install import install as _install
+from distutils.errors import DistutilsArgError
+
+class install(_install):
+    """Use easy_install to install the package, w/dependencies"""
+
+    user_options = _install.user_options + [
+        ('old-and-unmanageable', None, "Try not to use this!"),
+        ('single-version-externally-managed', None,
+            "used by system package builders to create 'flat' eggs"),
+    ]
+    boolean_options = _install.boolean_options + [
+        'old-and-unmanageable', 'single-version-externally-managed',
+    ]
+    new_commands = [
+        ('install_egg_info', lambda self: True),
+        ('install_scripts',  lambda self: True),
+    ]
+    _nc = dict(new_commands)
+
+    def initialize_options(self):
+        _install.initialize_options(self)
+        self.old_and_unmanageable = None
+        self.single_version_externally_managed = None
+        self.no_compile = None  # make DISTUTILS_DEBUG work right!
+
+    def finalize_options(self):
+        _install.finalize_options(self)
+        if self.root:
+            self.single_version_externally_managed = True
+        elif self.single_version_externally_managed:
+            if not self.root and not self.record:
+                raise DistutilsArgError(
+                    "You must specify --record or --root when building system"
+                    " packages"
+                )
+
+    def handle_extra_path(self):
+        if self.root or self.single_version_externally_managed:
+            # explicit backward-compatibility mode, allow extra_path to work
+            return _install.handle_extra_path(self)
+
+        # Ignore extra_path when installing an egg (or being run by another
+        # command without --root or --single-version-externally-managed)
+        self.path_file = None
+        self.extra_dirs = ''
+
+
+    def run(self):
+        # Explicit request for old-style install?  Just do it
+        if self.old_and_unmanageable or self.single_version_externally_managed:
+            return _install.run(self)
+
+        # Attempt to detect whether we were called from setup() or by another
+        # command.  If we were called by setup(), our caller will be the
+        # 'run_command' method in 'distutils.dist', and *its* caller will be
+        # the 'run_commands' method.  If we were called any other way, our
+        # immediate caller *might* be 'run_command', but it won't have been
+        # called by 'run_commands'.  This is slightly kludgy, but seems to
+        # work.
+        #
+        caller = sys._getframe(2)
+        caller_module = caller.f_globals.get('__name__','')
+        caller_name = caller.f_code.co_name
+
+        if caller_module != 'distutils.dist' or caller_name!='run_commands':
+            # We weren't called from the command line or setup(), so we
+            # should run in backward-compatibility mode to support bdist_*
+            # commands.
+            _install.run(self)
+        else:
+            self.do_egg_install()
+
+
+
+
+
+
+    def do_egg_install(self):
+
+        easy_install = self.distribution.get_command_class('easy_install')
+
+        cmd = easy_install(
+            self.distribution, args="x", root=self.root, record=self.record,
+        )
+        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
+        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
+
+        # pick up setup-dir .egg files only: no .egg-info
+        cmd.package_index.scan(glob.glob('*.egg'))
+
+        self.run_command('bdist_egg')
+        args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+        if setuptools.bootstrap_install_from:
+            # Bootstrap self-installation of setuptools
+            args.insert(0, setuptools.bootstrap_install_from)
+
+        cmd.args = args
+        cmd.run()
+        setuptools.bootstrap_install_from = None
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = [
+        cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
+    ] + install.new_commands
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
diff --git a/vendor/distribute-0.6.35/setuptools/command/install_egg_info.py b/vendor/distribute-0.6.35/setuptools/command/install_egg_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..f44b34b555573061c7d0940e4031cee0414e99ec
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/install_egg_info.py
@@ -0,0 +1,125 @@
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+from distutils import log, dir_util
+import os, shutil, pkg_resources
+
+class install_egg_info(Command):
+    """Install an .egg-info directory for the package"""
+
+    description = "Install an .egg-info directory for the package"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',('install_dir','install_dir'))
+        ei_cmd = self.get_finalized_command("egg_info")
+        basename = pkg_resources.Distribution(
+            None, None, ei_cmd.egg_name, ei_cmd.egg_version
+        ).egg_name()+'.egg-info'
+        self.source = ei_cmd.egg_info
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = [self.target]
+
+    def run(self):
+        self.run_command('egg_info')
+        target = self.target
+        if os.path.isdir(self.target) and not os.path.islink(self.target):
+            dir_util.remove_tree(self.target, dry_run=self.dry_run)
+        elif os.path.exists(self.target):
+            self.execute(os.unlink,(self.target,),"Removing "+self.target)
+        if not self.dry_run:
+            pkg_resources.ensure_directory(self.target)
+        self.execute(self.copytree, (),
+            "Copying %s to %s" % (self.source, self.target)
+        )
+        self.install_namespaces()
+
+    def get_outputs(self):
+        return self.outputs
+
+    def copytree(self):
+        # Copy the .egg-info tree to site-packages
+        def skimmer(src,dst):
+            # filter out source-control directories; note that 'src' is always
+            # a '/'-separated path, regardless of platform.  'dst' is a
+            # platform-specific path.
+            for skip in '.svn/','CVS/':
+                if src.startswith(skip) or '/'+skip in src:
+                    return None
+            self.outputs.append(dst)
+            log.debug("Copying %s to %s", src, dst)
+            return dst
+        unpack_archive(self.source, self.target, skimmer)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def install_namespaces(self):
+        nsp = self._get_all_ns_packages()
+        if not nsp: return
+        filename,ext = os.path.splitext(self.target)
+        filename += '-nspkg.pth'; self.outputs.append(filename)
+        log.info("Installing %s",filename)
+        if not self.dry_run:
+            f = open(filename,'wt')
+            for pkg in nsp:
+                # ensure pkg is not a unicode string under Python 2.7
+                pkg = str(pkg)
+                pth = tuple(pkg.split('.'))
+                trailer = '\n'
+                if '.' in pkg:
+                    trailer = (
+                        "; m and setattr(sys.modules[%r], %r, m)\n"
+                        % ('.'.join(pth[:-1]), pth[-1])
+                    )
+                f.write(
+                    "import sys,types,os; "
+                    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
+                        "*%(pth)r); "
+                    "ie = os.path.exists(os.path.join(p,'__init__.py')); "
+                    "m = not ie and "
+                        "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
+                    "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
+                    "(p not in mp) and mp.append(p)%(trailer)s"
+                    % locals()
+                )
+            f.close()
+
+    def _get_all_ns_packages(self):
+        nsp = {}
+        for pkg in self.distribution.namespace_packages or []:
+            pkg = pkg.split('.')
+            while pkg:
+                nsp['.'.join(pkg)] = 1
+                pkg.pop()
+        nsp=list(nsp)
+        nsp.sort()  # set up shorter names first
+        return nsp
+
+
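
``install_namespaces()`` writes a ``*-nspkg.pth`` file with one executable line
per namespace package, and ``_get_all_ns_packages()`` expands each declared
package into itself plus all of its parents. A small standalone sketch of that
expansion (hypothetical helper name)::

    def expand_namespace_packages(declared):
        # Mirror _get_all_ns_packages(): record every declared namespace
        # package and each ancestor, then sort so shorter names come first.
        seen = set()
        for pkg in declared:
            parts = pkg.split('.')
            while parts:
                seen.add('.'.join(parts))
                parts.pop()
        return sorted(seen)

    print(expand_namespace_packages(['zope.app.publisher']))
    # ['zope', 'zope.app', 'zope.app.publisher']
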
diff --git a/vendor/distribute-0.6.35/setuptools/command/install_lib.py b/vendor/distribute-0.6.35/setuptools/command/install_lib.py
new file mode 100644
index 0000000000000000000000000000000000000000..82afa1421bed5d8b892ca0ddeb7a6282fba5146d
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/install_lib.py
@@ -0,0 +1,82 @@
+from distutils.command.install_lib import install_lib as _install_lib
+import os
+
+class install_lib(_install_lib):
+    """Don't add compiled flags to filenames of non-Python files"""
+
+    def _bytecode_filenames (self, py_filenames):
+        bytecode_files = []
+        for py_file in py_filenames:
+            if not py_file.endswith('.py'):
+                continue
+            if self.compile:
+                bytecode_files.append(py_file + "c")
+            if self.optimize > 0:
+                bytecode_files.append(py_file + "o")
+
+        return bytecode_files
+
+    def run(self):
+        self.build()
+        outfiles = self.install()
+        if outfiles is not None:
+            # always compile, in case we have any extension stubs to deal with
+            self.byte_compile(outfiles)
+
+    def get_exclusions(self):
+        exclude = {}
+        nsp = self.distribution.namespace_packages
+
+        if (nsp and self.get_finalized_command('install')
+               .single_version_externally_managed
+        ):
+            for pkg in nsp:
+                parts = pkg.split('.')
+                while parts:
+                    pkgdir = os.path.join(self.install_dir, *parts)
+                    for f in '__init__.py', '__init__.pyc', '__init__.pyo':
+                        exclude[os.path.join(pkgdir,f)] = 1
+                    parts.pop()
+        return exclude
+
+    def copy_tree(
+        self, infile, outfile,
+        preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+    ):
+        assert preserve_mode and preserve_times and not preserve_symlinks
+        exclude = self.get_exclusions()
+
+        if not exclude:
+            return _install_lib.copy_tree(self, infile, outfile)
+
+        # Exclude namespace package __init__.py* files from the output
+
+        from setuptools.archive_util import unpack_directory
+        from distutils import log
+
+        outfiles = []
+
+        def pf(src, dst):
+            if dst in exclude:
+                log.warn("Skipping installation of %s (namespace package)",dst)
+                return False
+
+            log.info("copying %s -> %s", src, os.path.dirname(dst))
+            outfiles.append(dst)
+            return dst
+
+        unpack_directory(infile, outfile, pf)
+        return outfiles
+
+    def get_outputs(self):
+        outputs = _install_lib.get_outputs(self)
+        exclude = self.get_exclusions()
+        if exclude:
+            return [f for f in outputs if f not in exclude]
+        return outputs
+
+
+
+
+
+
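
``get_exclusions()`` above keeps namespace-package ``__init__`` modules out of a
``--single-version-externally-managed`` install, since the ``*-nspkg.pth`` file
takes over that job. A standalone sketch of the exclusion set it builds
(hypothetical function name)::

    import os

    def namespace_init_exclusions(install_dir, namespace_packages):
        # For each namespace package and each of its parents, exclude the
        # package's __init__.py and its byte-compiled variants.
        exclude = set()
        for pkg in namespace_packages:
            parts = pkg.split('.')
            while parts:
                pkgdir = os.path.join(install_dir, *parts)
                for name in ('__init__.py', '__init__.pyc', '__init__.pyo'):
                    exclude.add(os.path.join(pkgdir, name))
                parts.pop()
        return exclude

    for path in sorted(namespace_init_exclusions('site-packages', ['zope.app'])):
        print(path)
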
diff --git a/vendor/distribute-0.6.35/setuptools/command/install_scripts.py b/vendor/distribute-0.6.35/setuptools/command/install_scripts.py
new file mode 100644
index 0000000000000000000000000000000000000000..8245603597854c264873fd4b229d037b0b95f448
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/install_scripts.py
@@ -0,0 +1,54 @@
+from distutils.command.install_scripts import install_scripts \
+     as _install_scripts
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+import os
+from distutils import log
+
+class install_scripts(_install_scripts):
+    """Do normal script install, plus any egg_info wrapper scripts"""
+
+    def initialize_options(self):
+        _install_scripts.initialize_options(self)
+        self.no_ep = False
+
+    def run(self):
+        from setuptools.command.easy_install import get_script_args
+        from setuptools.command.easy_install import sys_executable
+
+        self.run_command("egg_info")
+        if self.distribution.scripts:
+            _install_scripts.run(self)  # run first to set up self.outfiles
+        else:
+            self.outfiles = []
+        if self.no_ep:
+            # don't install entry point scripts into .egg file!
+            return
+
+        ei_cmd = self.get_finalized_command("egg_info")
+        dist = Distribution(
+            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name, ei_cmd.egg_version,
+        )
+        bs_cmd = self.get_finalized_command('build_scripts')
+        executable = getattr(bs_cmd,'executable',sys_executable)
+        is_wininst = getattr(
+            self.get_finalized_command("bdist_wininst"), '_is_running', False
+        )
+        for args in get_script_args(dist, executable, is_wininst):
+            self.write_script(*args)
+
+    def write_script(self, script_name, contents, mode="t", *ignored):
+        """Write an executable file to the scripts directory"""
+        from setuptools.command.easy_install import chmod, current_umask
+        log.info("Installing %s script to %s", script_name, self.install_dir)
+        target = os.path.join(self.install_dir, script_name)
+        self.outfiles.append(target)
+
+        mask = current_umask()
+        if not self.dry_run:
+            ensure_directory(target)
+            f = open(target,"w"+mode)
+            f.write(contents)
+            f.close()
+            chmod(target, 0777-mask)
+
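
``write_script()`` sets each installed script's mode to ``0777`` minus the
current umask, which ``easy_install.current_umask()`` reads back by setting a
temporary umask and immediately restoring the old value. A sketch of that
round-trip, assuming the helper works as just described::

    import os

    def current_umask_sketch():
        # Read the process umask without changing it: set a temporary value,
        # capture the previous one, then put it back.
        tmp = os.umask(0o022)
        os.umask(tmp)
        return tmp

    print(oct(0o777 - current_umask_sketch()))   # mode applied to installed scripts
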
diff --git a/vendor/distribute-0.6.35/setuptools/command/register.py b/vendor/distribute-0.6.35/setuptools/command/register.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b2e085907ecaf1dd6251fd83572f93567c0864c
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/register.py
@@ -0,0 +1,10 @@
+from distutils.command.register import register as _register
+
+class register(_register):
+    __doc__ = _register.__doc__
+
+    def run(self):
+        # Make sure that we are using valid current name/version info
+        self.run_command('egg_info')
+        _register.run(self)
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/rotate.py b/vendor/distribute-0.6.35/setuptools/command/rotate.py
new file mode 100644
index 0000000000000000000000000000000000000000..11b6eae82b6a8bbb28f3908f4f9989840745d58c
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/rotate.py
@@ -0,0 +1,82 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+
+class rotate(Command):
+    """Delete older distributions"""
+
+    description = "delete older distributions, keeping N newest files"
+    user_options = [
+        ('match=',    'm', "patterns to match (required)"),
+        ('dist-dir=', 'd', "directory where the distributions are"),
+        ('keep=',     'k', "number of matching distributions to keep"),
+    ]
+
+    boolean_options = []
+
+    def initialize_options(self):
+        self.match = None
+        self.dist_dir = None
+        self.keep = None
+
+    def finalize_options(self):
+        if self.match is None:
+            raise DistutilsOptionError(
+                "Must specify one or more (comma-separated) match patterns "
+                "(e.g. '.zip' or '.egg')"
+            )
+        if self.keep is None:
+            raise DistutilsOptionError("Must specify number of files to keep")           
+        try:
+            self.keep = int(self.keep)
+        except ValueError:
+            raise DistutilsOptionError("--keep must be an integer")
+        if isinstance(self.match, basestring):
+            self.match = [
+                convert_path(p.strip()) for p in self.match.split(',')
+            ]
+        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+
+    def run(self):
+        self.run_command("egg_info")
+        from glob import glob
+        for pattern in self.match:
+            pattern = self.distribution.get_name()+'*'+pattern
+            files = glob(os.path.join(self.dist_dir,pattern))
+            files = [(os.path.getmtime(f),f) for f in files]
+            files.sort()
+            files.reverse()
+
+            log.info("%d file(s) matching %s", len(files), pattern)
+            files = files[self.keep:]
+            for (t,f) in files:
+                log.info("Deleting %s", f)
+                if not self.dry_run:
+                    os.unlink(f)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
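
The loop in ``rotate.run()`` sorts matching distributions by modification time,
newest first, and deletes everything beyond the ``--keep`` count. The same logic
as a standalone sketch (hypothetical function name)::

    import os
    from glob import glob

    def rotate_dists(dist_dir, project, suffix, keep, dry_run=True):
        files = glob(os.path.join(dist_dir, project + '*' + suffix))
        files.sort(key=os.path.getmtime, reverse=True)   # newest first
        for old in files[keep:]:                         # past the first `keep`
            print("Deleting %s" % old)
            if not dry_run:
                os.unlink(old)

    rotate_dists('dist', 'example', '.egg', keep=2)
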
diff --git a/vendor/distribute-0.6.35/setuptools/command/saveopts.py b/vendor/distribute-0.6.35/setuptools/command/saveopts.py
new file mode 100644
index 0000000000000000000000000000000000000000..1180a440c920fd6ce1a0e55bb803b031fc301336
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/saveopts.py
@@ -0,0 +1,25 @@
+import distutils, os
+from setuptools import Command
+from setuptools.command.setopt import edit_config, option_base
+
+class saveopts(option_base):
+    """Save command-line options to a file"""
+
+    description = "save supplied options to setup.cfg or other config file"
+
+    def run(self):
+        dist = self.distribution
+        commands = dist.command_options.keys()
+        settings = {}
+
+        for cmd in commands:
+
+            if cmd=='saveopts':
+                continue    # don't save our own options!
+
+            for opt,(src,val) in dist.get_option_dict(cmd).items():
+                if src=="command line":
+                    settings.setdefault(cmd,{})[opt] = val
+
+        edit_config(self.filename, settings, self.dry_run)
+
diff --git a/vendor/distribute-0.6.35/setuptools/command/sdist.py b/vendor/distribute-0.6.35/setuptools/command/sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..2fa3771aa6f27ca930367279e18026820f477b9e
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/sdist.py
@@ -0,0 +1,313 @@
+from distutils.command.sdist import sdist as _sdist
+from distutils.util import convert_path
+from distutils import log
+import os, re, sys, pkg_resources
+from glob import glob
+
+READMES = ('README', 'README.rst', 'README.txt')
+
+entities = [
+    ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
+    ("&amp;", "&")
+]
+
+def unescape(data):
+    for old,new in entities:
+        data = data.replace(old,new)
+    return data
+
+def re_finder(pattern, postproc=None):
+    def find(dirname, filename):
+        f = open(filename,'rU')
+        data = f.read()
+        f.close()
+        for match in pattern.finditer(data):
+            path = match.group(1)
+            if postproc:
+                path = postproc(path)
+            yield joinpath(dirname,path)
+    return find
+
+def joinpath(prefix,suffix):
+    if not prefix:
+        return suffix
+    return os.path.join(prefix,suffix)
+
+
+
+
+
+
+
+
+
+
+def walk_revctrl(dirname=''):
+    """Find all files under revision control"""
+    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+        for item in ep.load()(dirname):
+            yield item
+
+def _default_revctrl(dirname=''):
+    for path, finder in finders:
+        path = joinpath(dirname,path)
+        if os.path.isfile(path):
+            for path in finder(dirname,path):
+                if os.path.isfile(path):
+                    yield path
+                elif os.path.isdir(path):
+                    for item in _default_revctrl(path):
+                        yield item
+
+def externals_finder(dirname, filename):
+    """Find any 'svn:externals' directories"""
+    found = False
+    f = open(filename,'rt')
+    for line in iter(f.readline, ''):    # can't use direct iter!
+        parts = line.split()
+        if len(parts)==2:
+            kind,length = parts
+            data = f.read(int(length))
+            if kind=='K' and data=='svn:externals':
+                found = True
+            elif kind=='V' and found:
+                f.close()
+                break
+    else:
+        f.close()
+        return
+
+    for line in data.splitlines():
+        parts = line.split()
+        if parts:
+            yield joinpath(dirname, parts[0])
+
+
+entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
+
+def entries_finder(dirname, filename):
+    f = open(filename,'rU')
+    data = f.read()
+    f.close()
+    if data.startswith('10') or data.startswith('9') or data.startswith('8'):
+        for record in map(str.splitlines, data.split('\n\x0c\n')[1:]):
+            # subversion 1.6/1.5/1.4
+            if not record or len(record)>=6 and record[5]=="delete":
+                continue    # skip deleted
+            yield joinpath(dirname, record[0])
+    elif data.startswith('<?xml'):
+        for match in entries_pattern.finditer(data):
+            yield joinpath(dirname,unescape(match.group(1)))
+    else:
+        log.warn("unrecognized .svn/entries format in %s", os.path.abspath(dirname))
+
+
+finders = [
+    (convert_path('CVS/Entries'),
+        re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
+    (convert_path('.svn/entries'), entries_finder),
+    (convert_path('.svn/dir-props'), externals_finder),
+    (convert_path('.svn/dir-prop-base'), externals_finder),  # svn 1.4
+]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class sdist(_sdist):
+    """Smart sdist that finds anything supported by revision control"""
+
+    user_options = [
+        ('formats=', None,
+         "formats for source distribution (comma-separated list)"),
+        ('keep-temp', 'k',
+         "keep the distribution tree around after creating " +
+         "archive file(s)"),
+        ('dist-dir=', 'd',
+         "directory to put the source distribution archive(s) in "
+         "[default: dist]"),
+        ]
+
+    negative_opt = {}
+
+    def run(self):
+        self.run_command('egg_info')
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist = ei_cmd.filelist
+        self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
+        self.check_readme()
+
+        # Run sub commands
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        # Call check_metadata only if no 'check' command
+        # (distutils <= 2.6)
+        import distutils.command
+        if 'check' not in distutils.command.__all__:
+            self.check_metadata()
+            
+        self.make_distribution()
+
+        dist_files = getattr(self.distribution,'dist_files',[])
+        for file in self.archive_files:
+            data = ('sdist', '', file)
+            if data not in dist_files:
+                dist_files.append(data)
+
+    def add_defaults(self):
+        standards = [READMES,
+                     self.distribution.script_name]
+        for fn in standards:
+            if isinstance(fn, tuple):
+                alts = fn
+                got_it = 0
+                for fn in alts:
+                    if os.path.exists(fn):
+                        got_it = 1
+                        self.filelist.append(fn)
+                        break
+
+                if not got_it:
+                    self.warn("standard file not found: should have one of " +
+                              ', '.join(alts))
+            else:
+                if os.path.exists(fn):
+                    self.filelist.append(fn)
+                else:
+                    self.warn("standard file '%s' not found" % fn)
+
+        optional = ['test/test*.py', 'setup.cfg']
+        for pattern in optional:
+            files = filter(os.path.isfile, glob(pattern))
+            if files:
+                self.filelist.extend(files)
+
+        # getting python files
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            self.filelist.extend(build_py.get_source_files())
+            # This functionality is incompatible with include_package_data, and
+            # will in fact create an infinite recursion if include_package_data
+            # is True.  Use of include_package_data will imply that
+            # distutils-style automatic handling of package_data is disabled
+            if not self.distribution.include_package_data:
+                for _, src_dir, _, filenames in build_py.data_files:
+                    self.filelist.extend([os.path.join(src_dir, filename)
+                                          for filename in filenames])
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            self.filelist.extend(build_ext.get_source_files())
+
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.filelist.extend(build_clib.get_source_files())
+
+        if self.distribution.has_scripts():
+            build_scripts = self.get_finalized_command('build_scripts')
+            self.filelist.extend(build_scripts.get_source_files())
+
+    def __read_template_hack(self):
+        # This grody hack closes the template file (MANIFEST.in) if an
+        #  exception occurs during read_template.
+        # Doing so prevents an error when easy_install attempts to delete the
+        #  file.
+        try:
+            _sdist.read_template(self)
+        except:
+            sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
+            raise
+    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
+    #  has been fixed, so only override the method if we're using an earlier
+    #  Python.
+    if (
+            sys.version_info < (2,7,2)
+            or (3,0) <= sys.version_info < (3,1,4)
+            or (3,2) <= sys.version_info < (3,2,1)
+        ):
+        read_template = __read_template_hack
+
+    def check_readme(self):
+        for f in READMES:
+            if os.path.exists(f):
+                return
+        else:
+            self.warn(
+                "standard file not found: should have one of " +', '.join(READMES)
+            )
+
+
+    def make_release_tree(self, base_dir, files):
+        _sdist.make_release_tree(self, base_dir, files)
+
+        # Save any egg_info command line options used to create this sdist
+        dest = os.path.join(base_dir, 'setup.cfg')
+        if hasattr(os,'link') and os.path.exists(dest):
+            # unlink and re-copy, since it might be hard-linked, and
+            # we don't want to change the source version
+            os.unlink(dest)
+            self.copy_file('setup.cfg', dest)
+
+        self.get_finalized_command('egg_info').save_version_info(dest)
+
+    def _manifest_is_not_generated(self):
+        # check for special comment used in 2.7.1 and higher
+        if not os.path.isfile(self.manifest):
+            return False
+
+        fp = open(self.manifest, 'rbU')
+        try:
+            first_line = fp.readline()
+        finally:
+            fp.close()
+        return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode()
+
+    def read_manifest(self):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        log.info("reading manifest file '%s'", self.manifest)
+        manifest = open(self.manifest, 'rbU')
+        for line in manifest:
+            # The manifest must contain UTF-8. See #303.
+            if sys.version_info >= (3,):
+                try:
+                    line = line.decode('UTF-8')
+                except UnicodeDecodeError:
+                    log.warn("%r not UTF-8 decodable -- skipping" % line)
+                    continue
+            # ignore comments and blank lines
+            line = line.strip()
+            if line.startswith('#') or not line:
+                continue
+            self.filelist.append(line)
+        manifest.close()
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
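
``read_manifest()`` above reads the manifest as bytes and skips any line that is
not valid UTF-8 (issue #303). A standalone sketch of that decoding policy
(hypothetical function name)::

    def read_manifest_lines(path):
        names = []
        with open(path, 'rb') as manifest:
            for line in manifest:
                try:
                    line = line.decode('utf-8')
                except UnicodeDecodeError:
                    continue            # not UTF-8 decodable -- skip the entry
                line = line.strip()
                if line and not line.startswith('#'):
                    names.append(line)
        return names

    # e.g. read_manifest_lines('MANIFEST')
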
diff --git a/vendor/distribute-0.6.35/setuptools/command/setopt.py b/vendor/distribute-0.6.35/setuptools/command/setopt.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbf3a94ec126d98283956bd2eba5514000557f39
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/setopt.py
@@ -0,0 +1,164 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+    """Get the filename of the distutils, local, global, or per-user config
+
+    `kind` must be one of "local", "global", or "user"
+    """
+    if kind=='local':
+        return 'setup.cfg'
+    if kind=='global':
+        return os.path.join(
+            os.path.dirname(distutils.__file__),'distutils.cfg'
+        )
+    if kind=='user':
+        dot = os.name=='posix' and '.' or ''
+        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+    raise ValueError(
+        "config_file() type must be 'local', 'global', or 'user'", kind
+    )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def edit_config(filename, settings, dry_run=False):
+    """Edit a configuration file to include `settings`
+
+    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+    command/section name.  A ``None`` value means to delete the entire section,
+    while a dictionary lists settings to be changed or deleted in that section.
+    A setting of ``None`` means to delete that setting.
+    """
+    from ConfigParser import RawConfigParser
+    log.debug("Reading configuration from %s", filename)
+    opts = RawConfigParser()
+    opts.read([filename])
+    for section, options in settings.items():
+        if options is None:
+            log.info("Deleting section [%s] from %s", section, filename)
+            opts.remove_section(section)
+        else:
+            if not opts.has_section(section):
+                log.debug("Adding new section [%s] to %s", section, filename)
+                opts.add_section(section)
+            for option,value in options.items():
+                if value is None:
+                    log.debug("Deleting %s.%s from %s",
+                        section, option, filename
+                    )
+                    opts.remove_option(section,option)
+                    if not opts.options(section):
+                        log.info("Deleting empty [%s] section from %s",
+                                  section, filename)
+                        opts.remove_section(section)
+                else:
+                    log.debug(
+                        "Setting %s.%s to %r in %s",
+                        section, option, value, filename
+                    )
+                    opts.set(section,option,value)
+
+    log.info("Writing %s", filename)
+    if not dry_run:
+        f = open(filename,'w'); opts.write(f); f.close()
+
+class option_base(Command):
+    """Abstract base class for commands that mess with config files"""
+    
+    user_options = [
+        ('global-config', 'g',
+                 "save options to the site-wide distutils.cfg file"),
+        ('user-config', 'u',
+                 "save options to the current user's pydistutils.cfg file"),
+        ('filename=', 'f',
+                 "configuration file to use (default=setup.cfg)"),
+    ]
+
+    boolean_options = [
+        'global-config', 'user-config',
+    ]    
+
+    def initialize_options(self):
+        self.global_config = None
+        self.user_config   = None
+        self.filename = None
+
+    def finalize_options(self):
+        filenames = []
+        if self.global_config:
+            filenames.append(config_file('global'))
+        if self.user_config:
+            filenames.append(config_file('user'))
+        if self.filename is not None:
+            filenames.append(self.filename)
+        if not filenames:
+            filenames.append(config_file('local'))
+        if len(filenames)>1:
+            raise DistutilsOptionError(
+                "Must specify only one configuration file option",
+                filenames
+            )
+        self.filename, = filenames    
+
+
+
+
+class setopt(option_base):
+    """Save command-line options to a file"""
+
+    description = "set an option in setup.cfg or another config file"
+
+    user_options = [
+        ('command=', 'c', 'command to set an option for'),
+        ('option=',  'o',  'option to set'),
+        ('set-value=',   's', 'value of the option'),
+        ('remove',   'r', 'remove (unset) the value'), 
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.command = None
+        self.option = None
+        self.set_value = None
+        self.remove = None
+
+    def finalize_options(self):
+        option_base.finalize_options(self)
+        if self.command is None or self.option is None:
+            raise DistutilsOptionError("Must specify --command *and* --option")
+        if self.set_value is None and not self.remove:
+            raise DistutilsOptionError("Must specify --set-value or --remove")
+
+    def run(self):
+        edit_config(
+            self.filename, {
+                self.command: {self.option.replace('-','_'):self.set_value}
+            },
+            self.dry_run
+        )
+
+
+
+
+
+
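
``edit_config()`` interprets its ``settings`` mapping as section -> options,
where ``None`` deletes a whole section and a ``None`` option value deletes just
that option. A simplified, standalone re-implementation for illustration (not
the vendored function)::

    try:
        from configparser import RawConfigParser       # Python 3
    except ImportError:
        from ConfigParser import RawConfigParser       # Python 2

    def edit_config_sketch(filename, settings):
        parser = RawConfigParser()
        parser.read([filename])
        for section, options in settings.items():
            if options is None:
                parser.remove_section(section)          # drop the whole section
                continue
            if not parser.has_section(section):
                parser.add_section(section)
            for option, value in options.items():
                if value is None:
                    parser.remove_option(section, option)
                else:
                    parser.set(section, option, value)
        with open(filename, 'w') as f:
            parser.write(f)

    edit_config_sketch('setup.cfg', {'bdist_egg': {'dist_dir': 'build/eggs'}})
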
diff --git a/vendor/distribute-0.6.35/setuptools/command/test.py b/vendor/distribute-0.6.35/setuptools/command/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..a02ac1424068b39efad4a180ede916f1fe183d79
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/test.py
@@ -0,0 +1,198 @@
+from setuptools import Command
+from distutils.errors import DistutilsOptionError
+import sys
+from pkg_resources import *
+from pkg_resources import _namespace_packages
+from unittest import TestLoader, main
+
+class ScanningLoader(TestLoader):
+
+    def loadTestsFromModule(self, module):
+        """Return a suite of all tests cases contained in the given module
+
+        If the module is a package, load tests from all the modules in it.
+        If the module has an ``additional_tests`` function, call it and add
+        the return value to the tests.
+        """
+        tests = []
+        if module.__name__!='setuptools.tests.doctest':  # ugh
+            tests.append(TestLoader.loadTestsFromModule(self,module))
+
+        if hasattr(module, "additional_tests"):
+            tests.append(module.additional_tests())
+
+        if hasattr(module, '__path__'):
+            for file in resource_listdir(module.__name__, ''):
+                if file.endswith('.py') and file!='__init__.py':
+                    submodule = module.__name__+'.'+file[:-3]
+                else:
+                    if resource_exists(
+                        module.__name__, file+'/__init__.py'
+                    ):
+                        submodule = module.__name__+'.'+file
+                    else:
+                        continue
+                tests.append(self.loadTestsFromName(submodule))
+
+        if len(tests)!=1:
+            return self.suiteClass(tests)
+        else:
+            return tests[0] # don't create a nested suite when there is only one test
+
+
+class test(Command):
+
+    """Command to run unit tests after in-place build"""
+
+    description = "run unit tests after in-place build"
+
+    user_options = [
+        ('test-module=','m', "Run 'test_suite' in specified module"),
+        ('test-suite=','s',
+            "Test suite to run (e.g. 'some_module.test_suite')"),
+    ]
+
+    def initialize_options(self):
+        self.test_suite = None
+        self.test_module = None
+        self.test_loader = None
+
+
+    def finalize_options(self):
+
+        if self.test_suite is None:
+            if self.test_module is None:
+                self.test_suite = self.distribution.test_suite
+            else:
+                self.test_suite = self.test_module+".test_suite"
+        elif self.test_module:
+            raise DistutilsOptionError(
+                "You may specify a module or a suite, but not both"
+            )
+
+        self.test_args = [self.test_suite]
+
+        if self.verbose:
+            self.test_args.insert(0,'--verbose')
+        if self.test_loader is None:
+            self.test_loader = getattr(self.distribution,'test_loader',None)
+        if self.test_loader is None:
+            self.test_loader = "setuptools.command.test:ScanningLoader"
+
+
+
+    def with_project_on_sys_path(self, func):
+        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
+            # If we run 2to3 we can not do this inplace:
+
+            # Ensure metadata is up-to-date
+            self.reinitialize_command('build_py', inplace=0)
+            self.run_command('build_py')
+            bpy_cmd = self.get_finalized_command("build_py")
+            build_path = normalize_path(bpy_cmd.build_lib)
+
+            # Build extensions
+            self.reinitialize_command('egg_info', egg_base=build_path)
+            self.run_command('egg_info')
+
+            self.reinitialize_command('build_ext', inplace=0)
+            self.run_command('build_ext')
+        else:
+            # Without 2to3 inplace works fine:
+            self.run_command('egg_info')
+
+            # Build extensions in-place
+            self.reinitialize_command('build_ext', inplace=1)
+            self.run_command('build_ext')
+
+        ei_cmd = self.get_finalized_command("egg_info")
+
+        old_path = sys.path[:]
+        old_modules = sys.modules.copy()
+
+        try:
+            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
+            working_set.__init__()
+            add_activation_listener(lambda dist: dist.activate())
+            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+            func()
+        finally:
+            sys.path[:] = old_path
+            sys.modules.clear()
+            sys.modules.update(old_modules)
+            working_set.__init__()
+
+
+    def run(self):
+        if self.distribution.install_requires:
+            self.distribution.fetch_build_eggs(self.distribution.install_requires)
+        if self.distribution.tests_require:
+            self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+        if self.test_suite:
+            cmd = ' '.join(self.test_args)
+            if self.dry_run:
+                self.announce('skipping "unittest %s" (dry run)' % cmd)
+            else:
+                self.announce('running "unittest %s"' % cmd)
+                self.with_project_on_sys_path(self.run_tests)
+
+
+    def run_tests(self):
+        import unittest
+
+        # Purge modules under test from sys.modules. The test loader will
+        # re-import them from the build location. Required when 2to3 is used
+        # with namespace packages.
+        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
+            module = self.test_args[-1].split('.')[0]
+            if module in _namespace_packages:
+                del_modules = []
+                if module in sys.modules:
+                    del_modules.append(module)
+                module += '.'
+                for name in sys.modules:
+                    if name.startswith(module):
+                        del_modules.append(name)
+                map(sys.modules.__delitem__, del_modules)
+
+        loader_ep = EntryPoint.parse("x="+self.test_loader)
+        loader_class = loader_ep.load(require=False)
+        cks = loader_class()
+        unittest.main(
+            None, None, [unittest.__file__]+self.test_args,
+            testLoader = cks
+        )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
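
``with_project_on_sys_path()`` runs the test callable with the built project
first on ``sys.path`` and then restores both ``sys.path`` and ``sys.modules``,
so the test run cannot leak imports into the rest of the build. The same
save/restore pattern as a small context manager (an illustration, not the
vendored API)::

    import sys
    from contextlib import contextmanager

    @contextmanager
    def project_on_sys_path(egg_base):
        old_path = sys.path[:]
        old_modules = sys.modules.copy()
        try:
            sys.path.insert(0, egg_base)
            yield
        finally:
            sys.path[:] = old_path        # undo the insertion
            sys.modules.clear()
            sys.modules.update(old_modules)

    # with project_on_sys_path('build/lib'):
    #     run_tests()
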
diff --git a/vendor/distribute-0.6.35/setuptools/command/upload.py b/vendor/distribute-0.6.35/setuptools/command/upload.py
new file mode 100644
index 0000000000000000000000000000000000000000..21b9615c42ac752723e2223b66e40b63cf9f9521
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/upload.py
@@ -0,0 +1,185 @@
+"""distutils.command.upload
+
+Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
+
+from distutils.errors import *
+from distutils.core import Command
+from distutils.spawn import spawn
+from distutils import log
+try:
+    from hashlib import md5
+except ImportError:
+    from md5 import md5
+import os
+import socket
+import platform
+import ConfigParser
+import httplib
+import base64
+import urlparse
+import cStringIO as StringIO
+
+class upload(Command):
+
+    description = "upload binary package to PyPI"
+
+    DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
+
+    user_options = [
+        ('repository=', 'r',
+         "url of repository [default: %s]" % DEFAULT_REPOSITORY),
+        ('show-response', None,
+         'display full response text from server'),
+        ('sign', 's',
+         'sign files to upload using gpg'),
+        ('identity=', 'i', 'GPG identity used to sign files'),
+        ]
+    boolean_options = ['show-response', 'sign']
+
+    def initialize_options(self):
+        self.username = ''
+        self.password = ''
+        self.repository = ''
+        self.show_response = 0
+        self.sign = False
+        self.identity = None
+
+    def finalize_options(self):
+        if self.identity and not self.sign:
+            raise DistutilsOptionError(
+                "Must use --sign for --identity to have meaning"
+            )
+        if os.environ.has_key('HOME'):
+            rc = os.path.join(os.environ['HOME'], '.pypirc')
+            if os.path.exists(rc):
+                self.announce('Using PyPI login from %s' % rc)
+                config = ConfigParser.ConfigParser({
+                        'username':'',
+                        'password':'',
+                        'repository':''})
+                config.read(rc)
+                if not self.repository:
+                    self.repository = config.get('server-login', 'repository')
+                if not self.username:
+                    self.username = config.get('server-login', 'username')
+                if not self.password:
+                    self.password = config.get('server-login', 'password')
+        if not self.repository:
+            self.repository = self.DEFAULT_REPOSITORY
+
+    def run(self):
+        if not self.distribution.dist_files:
+            raise DistutilsOptionError("No dist file created in earlier command")
+        for command, pyversion, filename in self.distribution.dist_files:
+            self.upload_file(command, pyversion, filename)
+
+    def upload_file(self, command, pyversion, filename):
+        # Sign if requested
+        if self.sign:
+            gpg_args = ["gpg", "--detach-sign", "-a", filename]
+            if self.identity:
+                gpg_args[2:2] = ["--local-user", self.identity]
+            spawn(gpg_args,
+                  dry_run=self.dry_run)
+
+        # Fill in the data
+        f = open(filename,'rb')
+        content = f.read()
+        f.close()
+        basename = os.path.basename(filename)
+        comment = ''
+        if command=='bdist_egg' and self.distribution.has_ext_modules():
+            comment = "built on %s" % platform.platform(terse=1)
+        data = {
+            ':action':'file_upload',
+            'protocol_version':'1',
+            'name':self.distribution.get_name(),
+            'version':self.distribution.get_version(),
+            'content':(basename,content),
+            'filetype':command,
+            'pyversion':pyversion,
+            'md5_digest':md5(content).hexdigest(),
+            }
+        if command == 'bdist_rpm':
+            dist, version, id = platform.dist()
+            if dist:
+                comment = 'built for %s %s' % (dist, version)
+        elif command == 'bdist_dumb':
+            comment = 'built for %s' % platform.platform(terse=1)
+        data['comment'] = comment
+
+        if self.sign:
+            asc_file = open(filename + ".asc")
+            data['gpg_signature'] = (os.path.basename(filename) + ".asc", asc_file.read())
+            asc_file.close()
+
+        # set up the authentication
+        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
+
+        # Build up the MIME payload for the POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = '\n--' + boundary
+        end_boundary = sep_boundary + '--'
+        body = StringIO.StringIO()
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if type(value) != type([]):
+                value = [value]
+            for value in value:
+                if type(value) is tuple:
+                    fn = ';filename="%s"' % value[0]
+                    value = value[1]
+                else:
+                    fn = ""
+                value = str(value)
+                body.write(sep_boundary)
+                body.write('\nContent-Disposition: form-data; name="%s"'%key)
+                body.write(fn)
+                body.write("\n\n")
+                body.write(value)
+                if value and value[-1] == '\r':
+                    body.write('\n')  # write an extra newline (lurve Macs)
+        body.write(end_boundary)
+        body.write("\n")
+        body = body.getvalue()
+
+        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
+
+        # build the Request
+        # We can't use urllib2 since we need to send the Basic
+        # auth right with the first request
+        schema, netloc, url, params, query, fragments = \
+            urlparse.urlparse(self.repository)
+        assert not params and not query and not fragments
+        if schema == 'http':
+            http = httplib.HTTPConnection(netloc)
+        elif schema == 'https':
+            http = httplib.HTTPSConnection(netloc)
+        else:
+            raise AssertionError, "unsupported schema "+schema
+
+        data = ''
+        loglevel = log.INFO
+        try:
+            http.connect()
+            http.putrequest("POST", url)
+            http.putheader('Content-type',
+                           'multipart/form-data; boundary=%s'%boundary)
+            http.putheader('Content-length', str(len(body)))
+            http.putheader('Authorization', auth)
+            http.endheaders()
+            http.send(body)
+        except socket.error, e:
+            self.announce(str(e), log.ERROR)
+            return
+
+        r = http.getresponse()
+        if r.status == 200:
+            self.announce('Server response (%s): %s' % (r.status, r.reason),
+                          log.INFO)
+        else:
+            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+                          log.ERROR)
+        if self.show_response:
+            print '-'*75, r.read(), '-'*75
+
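
``upload_file()`` assembles the ``multipart/form-data`` POST body by hand so the
Basic auth header can be sent with the very first request. A trimmed-down sketch
of that body construction (hypothetical function name; the real code also
handles list values, binary content, and the trailing-``\r`` quirk)::

    def build_multipart(fields, boundary):
        lines = []
        for name, value in fields.items():
            disposition = 'Content-Disposition: form-data; name="%s"' % name
            if isinstance(value, tuple):                 # (filename, content)
                disposition += '; filename="%s"' % value[0]
                value = value[1]
            lines.extend(['--' + boundary, disposition, '', str(value)])
        lines.append('--' + boundary + '--')
        return '\n'.join(lines) + '\n'

    print(build_multipart({':action': 'file_upload', 'name': 'example'},
                          'XXBOUNDARYXX'))
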
diff --git a/vendor/distribute-0.6.35/setuptools/command/upload_docs.py b/vendor/distribute-0.6.35/setuptools/command/upload_docs.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d5a7445121ed3ec7b8bcb02420ddcdc1bb24276
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/command/upload_docs.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+"""upload_docs
+
+Implements a Distutils 'upload_docs' subcommand (upload documentation to
+PyPI's packages.python.org).
+"""
+
+import os
+import socket
+import zipfile
+import httplib
+import urlparse
+import tempfile
+import sys
+import shutil
+
+from base64 import standard_b64encode
+from pkg_resources import iter_entry_points
+
+from distutils import log
+from distutils.errors import DistutilsOptionError
+
+try:
+    from distutils.command.upload import upload
+except ImportError:
+    from setuptools.command.upload import upload
+
+
+# This is not just a replacement for byte literals
+# but works as a general purpose encoder
+def b(s, encoding='utf-8'):
+    if isinstance(s, unicode):
+        return s.encode(encoding)
+    return s
+
+
+class upload_docs(upload):
+
+    description = 'Upload documentation to PyPI'
+
+    user_options = [
+        ('repository=', 'r',
+         "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
+        ('show-response', None,
+         'display full response text from server'),
+        ('upload-dir=', None, 'directory to upload'),
+        ]
+    boolean_options = upload.boolean_options
+
+    def has_sphinx(self):
+        if self.upload_dir is None:
+            for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
+                return True
+
+    sub_commands = [('build_sphinx', has_sphinx)]
+
+    def initialize_options(self):
+        upload.initialize_options(self)
+        self.upload_dir = None
+        self.target_dir = None
+
+    def finalize_options(self):
+        upload.finalize_options(self)
+        if self.upload_dir is None:
+            if self.has_sphinx():
+                build_sphinx = self.get_finalized_command('build_sphinx')
+                self.target_dir = build_sphinx.builder_target_dir
+            else:
+                build = self.get_finalized_command('build')
+                self.target_dir = os.path.join(build.build_base, 'docs')
+        else:
+            self.ensure_dirname('upload_dir')
+            self.target_dir = self.upload_dir
+        self.announce('Using upload directory %s' % self.target_dir)
+
+    def create_zipfile(self, filename):
+        zip_file = zipfile.ZipFile(filename, "w")
+        try:
+            self.mkpath(self.target_dir)  # just in case
+            for root, dirs, files in os.walk(self.target_dir):
+                if root == self.target_dir and not files:
+                    raise DistutilsOptionError(
+                        "no files found in upload directory '%s'"
+                        % self.target_dir)
+                for name in files:
+                    full = os.path.join(root, name)
+                    relative = root[len(self.target_dir):].lstrip(os.path.sep)
+                    dest = os.path.join(relative, name)
+                    zip_file.write(full, dest)
+        finally:
+            zip_file.close()
+
+    def run(self):
+        # Run sub commands
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        tmp_dir = tempfile.mkdtemp()
+        name = self.distribution.metadata.get_name()
+        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
+        try:
+            self.create_zipfile(zip_file)
+            self.upload_file(zip_file)
+        finally:
+            shutil.rmtree(tmp_dir)
+
+    def upload_file(self, filename):
+        f = open(filename, 'rb')
+        content = f.read()
+        f.close()
+        meta = self.distribution.metadata
+        data = {
+            ':action': 'doc_upload',
+            'name': meta.get_name(),
+            'content': (os.path.basename(filename), content),
+        }
+        # set up the authentication
+        credentials = b(self.username + ':' + self.password)
+        credentials = standard_b64encode(credentials)
+        if sys.version_info >= (3,):
+            credentials = credentials.decode('ascii')
+        auth = "Basic " + credentials
+
+        # Build up the MIME payload for the POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = b('\n--') + b(boundary)
+        end_boundary = sep_boundary + b('--')
+        body = []
+        for key, values in data.iteritems():
+            title = '\nContent-Disposition: form-data; name="%s"' % key
+            # handle multiple entries for the same name
+            if type(values) != type([]):
+                values = [values]
+            for value in values:
+                if type(value) is tuple:
+                    title += '; filename="%s"' % value[0]
+                    value = value[1]
+                else:
+                    value = b(value)
+                body.append(sep_boundary)
+                body.append(b(title))
+                body.append(b("\n\n"))
+                body.append(value)
+                if value and value[-1:] == b('\r'):
+                    body.append(b('\n'))  # write an extra newline (lurve Macs)
+        body.append(end_boundary)
+        body.append(b("\n"))
+        body = b('').join(body)
+
+        self.announce("Submitting documentation to %s" % (self.repository),
+                      log.INFO)
+
+        # build the Request
+        # We can't use urllib2 since we need to send the Basic
+        # auth right with the first request
+        schema, netloc, url, params, query, fragments = \
+            urlparse.urlparse(self.repository)
+        assert not params and not query and not fragments
+        if schema == 'http':
+            conn = httplib.HTTPConnection(netloc)
+        elif schema == 'https':
+            conn = httplib.HTTPSConnection(netloc)
+        else:
+            raise AssertionError("unsupported schema "+schema)
+
+        data = ''
+        loglevel = log.INFO
+        try:
+            conn.connect()
+            conn.putrequest("POST", url)
+            conn.putheader('Content-type',
+                           'multipart/form-data; boundary=%s'%boundary)
+            conn.putheader('Content-length', str(len(body)))
+            conn.putheader('Authorization', auth)
+            conn.endheaders()
+            conn.send(body)
+        except socket.error, e:
+            self.announce(str(e), log.ERROR)
+            return
+
+        r = conn.getresponse()
+        if r.status == 200:
+            self.announce('Server response (%s): %s' % (r.status, r.reason),
+                          log.INFO)
+        elif r.status == 301:
+            location = r.getheader('Location')
+            if location is None:
+                location = 'http://packages.python.org/%s/' % meta.get_name()
+            self.announce('Upload successful. Visit %s' % location,
+                          log.INFO)
+        else:
+            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+                          log.ERROR)
+        if self.show_response:
+            print '-'*75, r.read(), '-'*75
diff --git a/vendor/distribute-0.6.35/setuptools/depends.py b/vendor/distribute-0.6.35/setuptools/depends.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b7b343760a38c7b3d979c09a9d747be146879b5
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/depends.py
@@ -0,0 +1,246 @@
+from __future__ import generators
+import sys, imp, marshal
+from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
+from distutils.version import StrictVersion, LooseVersion
+
+__all__ = [
+    'Require', 'find_module', 'get_module_constant', 'extract_constant'
+]
+
+class Require:
+    """A prerequisite to building or installing a distribution"""
+
+    def __init__(self,name,requested_version,module,homepage='',
+        attribute=None,format=None
+    ):
+
+        if format is None and requested_version is not None:
+            format = StrictVersion
+
+        if format is not None:
+            requested_version = format(requested_version)
+            if attribute is None:
+                attribute = '__version__'
+
+        self.__dict__.update(locals())
+        del self.self
+
+
+    def full_name(self):
+        """Return full package/distribution name, w/version"""
+        if self.requested_version is not None:
+            return '%s-%s' % (self.name,self.requested_version)
+        return self.name
+
+
+    def version_ok(self,version):
+        """Is 'version' sufficiently up-to-date?"""
+        return self.attribute is None or self.format is None or \
+            str(version)<>"unknown" and version >= self.requested_version
+
+
+    def get_version(self, paths=None, default="unknown"):
+
+        """Get version number of installed module, 'None', or 'default'
+
+        Search 'paths' for module.  If not found, return 'None'.  If found,
+        return the extracted version attribute, or 'default' if no version
+        attribute was specified, or the value cannot be determined without
+        importing the module.  The version is formatted according to the
+        requirement's version format (if any), unless it is 'None' or the
+        supplied 'default'.
+        """
+
+        if self.attribute is None:
+            try:
+                f,p,i = find_module(self.module,paths)
+                if f: f.close()
+                return default
+            except ImportError:
+                return None
+
+        v = get_module_constant(self.module,self.attribute,default,paths)
+
+        if v is not None and v is not default and self.format is not None:
+            return self.format(v)
+
+        return v
+
+
+    def is_present(self,paths=None):
+        """Return true if dependency is present on 'paths'"""
+        return self.get_version(paths) is not None
+
+
+    def is_current(self,paths=None):
+        """Return true if dependency is present and up-to-date on 'paths'"""
+        version = self.get_version(paths)
+        if version is None:
+            return False
+        return self.version_ok(version)
+
+
+def _iter_code(code):
+
+    """Yield '(op,arg)' pair for each operation in code object 'code'"""
+
+    from array import array
+    from dis import HAVE_ARGUMENT, EXTENDED_ARG
+
+    bytes = array('b',code.co_code)
+    eof = len(code.co_code)
+
+    ptr = 0
+    extended_arg = 0
+
+    while ptr<eof:
+
+        op = bytes[ptr]
+
+        if op>=HAVE_ARGUMENT:
+
+            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
+            ptr += 3
+
+            if op==EXTENDED_ARG:
+                extended_arg = arg * 65536L
+                continue
+
+        else:
+            arg = None
+            ptr += 1
+
+        yield op,arg
+
+
+
+
+
+
+
+
+
+
+def find_module(module, paths=None):
+    """Just like 'imp.find_module()', but with package support"""
+
+    parts = module.split('.')
+
+    while parts:
+        part = parts.pop(0)
+        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
+
+        if kind==PKG_DIRECTORY:
+            parts = parts or ['__init__']
+            paths = [path]
+
+        elif parts:
+            raise ImportError("Can't find %r in %s" % (parts,module))
+
+    return info
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def get_module_constant(module, symbol, default=-1, paths=None):
+
+    """Find 'module' by searching 'paths', and extract 'symbol'
+
+    Return 'None' if 'module' does not exist on 'paths', or it does not define
+    'symbol'.  If the module defines 'symbol' as a constant, return the
+    constant.  Otherwise, return 'default'."""
+
+    try:
+        f, path, (suffix,mode,kind) = find_module(module,paths)
+    except ImportError:
+        # Module doesn't exist
+        return None
+
+    try:
+        if kind==PY_COMPILED:
+            f.read(8)   # skip magic & date
+            code = marshal.load(f)
+        elif kind==PY_FROZEN:
+            code = imp.get_frozen_object(module)
+        elif kind==PY_SOURCE:
+            code = compile(f.read(), path, 'exec')
+        else:
+            # Not something we can parse; we'll have to import it.  :(
+            if module not in sys.modules:
+                imp.load_module(module,f,path,(suffix,mode,kind))
+            return getattr(sys.modules[module],symbol,None)
+
+    finally:
+        if f:
+            f.close()
+
+    return extract_constant(code,symbol,default)
+
+
+
+
+
+
+
+
+def extract_constant(code,symbol,default=-1):
+    """Extract the constant value of 'symbol' from 'code'
+
+    If the name 'symbol' is bound to a constant value by the Python code
+    object 'code', return that value.  If 'symbol' is bound to an expression,
+    return 'default'.  Otherwise, return 'None'.
+
+    Return value is based on the first assignment to 'symbol'.  'symbol' must
+    be a global, or at least a non-"fast" local in the code block.  That is,
+    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+    must be present in 'code.co_names'.
+    """
+
+    if symbol not in code.co_names:
+        # name's not there, can't possibly be an assignment
+        return None
+
+    name_idx = list(code.co_names).index(symbol)
+
+    STORE_NAME = 90
+    STORE_GLOBAL = 97
+    LOAD_CONST = 100
+
+    const = default
+
+    for op, arg in _iter_code(code):
+
+        if op==LOAD_CONST:
+            const = code.co_consts[arg]
+        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
+            return const
+        else:
+            const = default
+            
+if sys.platform.startswith('java') or sys.platform == 'cli':
+    # XXX it'd be better to test assertions about bytecode instead...
+    del extract_constant, get_module_constant
+    __all__.remove('extract_constant')
+    __all__.remove('get_module_constant')
+
+
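
``extract_constant()`` scans a code object for the first ``STORE_NAME`` or
``STORE_GLOBAL`` of the requested symbol and reports the constant loaded just
before it, which is how a module's ``__version__`` can be read without importing
the module. A hedged usage sketch; note the vendored scanner hard-codes opcode
numbers and the classic 1- and 3-byte instruction encoding, so it is tied to the
CPython versions of its era::

    source = "__version__ = '1.2'\ncomputed = 1 + 1\n"
    code = compile(source, '<example>', 'exec')
    # extract_constant(code, '__version__')  -> '1.2'   (bound to a constant)
    # extract_constant(code, 'computed')     -> -1      (bound to an expression)
    # extract_constant(code, 'missing')      -> None    (name not in co_names)
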
diff --git a/vendor/distribute-0.6.35/setuptools/dist.py b/vendor/distribute-0.6.35/setuptools/dist.py
new file mode 100644
index 0000000000000000000000000000000000000000..998a4dbe8b963427d23780584e1dca7e35e61fcf
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/dist.py
@@ -0,0 +1,855 @@
+__all__ = ['Distribution']
+
+import re
+from distutils.core import Distribution as _Distribution
+from setuptools.depends import Require
+from setuptools.command.install import install
+from setuptools.command.sdist import sdist
+from setuptools.command.install_lib import install_lib
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
+import os, distutils.log
+
+def _get_unpatched(cls):
+    """Protect against re-patching the distutils if reloaded
+
+    Also ensures that no other distutils extension monkeypatched the distutils
+    first.
+    """
+    while cls.__module__.startswith('setuptools'):
+        cls, = cls.__bases__
+    if not cls.__module__.startswith('distutils'):
+        raise AssertionError(
+            "distutils has already been patched by %r" % cls
+        )
+    return cls
+
+_Distribution = _get_unpatched(_Distribution)
+
+sequence = tuple, list
+
+def check_importable(dist, attr, value):
+    try:
+        ep = pkg_resources.EntryPoint.parse('x='+value)
+        assert not ep.extras
+    except (TypeError,ValueError,AttributeError,AssertionError):
+        raise DistutilsSetupError(
+            "%r must be importable 'module:attrs' string (got %r)"
+            % (attr,value)
+        )
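+# e.g. an acceptable value looks like "some_pkg.some_module:SomeClass"
+# (hypothetical names); a value declaring extras, such as
+# "some_pkg.some_module:SomeClass [extra]", is rejected here.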
+
+
+def assert_string_list(dist, attr, value):
+    """Verify that value is a string list or None"""
+    try:
+        assert ''.join(value)!=value
+    except (TypeError,ValueError,AttributeError,AssertionError):
+        raise DistutilsSetupError(
+            "%r must be a list of strings (got %r)" % (attr,value)
+        )
+
+def check_nsp(dist, attr, value):
+    """Verify that namespace packages are valid"""
+    assert_string_list(dist,attr,value)
+    for nsp in value:
+        if not dist.has_contents_for(nsp):
+            raise DistutilsSetupError(
+                "Distribution contains no modules or packages for " +
+                "namespace package %r" % nsp
+            )
+        if '.' in nsp:
+            parent = '.'.join(nsp.split('.')[:-1])
+            if parent not in value:
+                distutils.log.warn(
+                    "%r is declared as a package namespace, but %r is not:"
+                    " please correct this in setup.py", nsp, parent
+                )
+
+def check_extras(dist, attr, value):
+    """Verify that extras_require mapping is valid"""
+    try:
+        for k,v in value.items():
+            list(pkg_resources.parse_requirements(v))
+    except (TypeError,ValueError,AttributeError):
+        raise DistutilsSetupError(
+            "'extras_require' must be a dictionary whose values are "
+            "strings or lists of strings containing valid project/version "
+            "requirement specifiers."
+        )
+
+
+
+
+def assert_bool(dist, attr, value):
+    """Verify that value is True, False, 0, or 1"""
+    if bool(value) != value:
+        raise DistutilsSetupError(
+            "%r must be a boolean value (got %r)" % (attr,value)
+        )
+def check_requirements(dist, attr, value):
+    """Verify that install_requires is a valid requirements list"""
+    try:
+        list(pkg_resources.parse_requirements(value))
+    except (TypeError,ValueError):
+        raise DistutilsSetupError(
+            "%r must be a string or list of strings "
+            "containing valid project/version requirement specifiers" % (attr,)
+        )
+def check_entry_points(dist, attr, value):
+    """Verify that entry_points map is parseable"""
+    try:
+        pkg_resources.EntryPoint.parse_map(value)
+    except ValueError, e:
+        raise DistutilsSetupError(e)
+
+def check_test_suite(dist, attr, value):
+    if not isinstance(value,basestring):
+        raise DistutilsSetupError("test_suite must be a string")
+
+def check_package_data(dist, attr, value):
+    """Verify that value is a dictionary of package names to glob lists"""
+    if isinstance(value,dict):
+        for k,v in value.items():
+            if not isinstance(k,str): break
+            try: iter(v)
+            except TypeError:
+                break
+        else:
+            return
+    raise DistutilsSetupError(
+        attr+" must be a dictionary mapping package names to lists of "
+        "wildcard patterns"
+    )
+
+class Distribution(_Distribution):
+    """Distribution with support for features, tests, and package data
+
+    This is an enhanced version of 'distutils.dist.Distribution' that
+    effectively adds the following new optional keyword arguments to 'setup()':
+
+     'install_requires' -- a string or sequence of strings specifying project
+        versions that the distribution requires when installed, in the format
+        used by 'pkg_resources.require()'.  They will be installed
+        automatically when the package is installed.  If you wish to use
+        packages that are not available in PyPI, or want to give your users an
+        alternate download location, you can add a 'find_links' option to the
+        '[easy_install]' section of your project's 'setup.cfg' file, and then
+        setuptools will scan the listed web pages for links that satisfy the
+        requirements.
+
+     'extras_require' -- a dictionary mapping names of optional "extras" to the
+        additional requirement(s) that using those extras incurs. For example,
+        this::
+
+            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
+
+        indicates that the distribution can optionally provide an extra
+        capability called "reST", but it can only be used if docutils and
+        reSTedit are installed.  If the user installs your package using
+        EasyInstall and requests one of your extras, the corresponding
+        additional requirements will be installed if needed.
+
+     'features' -- a dictionary mapping option names to 'setuptools.Feature'
+        objects.  Features are a portion of the distribution that can be
+        included or excluded based on user options, inter-feature dependencies,
+        and availability on the current system.  Excluded features are omitted
+        from all setup commands, including source and binary distributions, so
+        you can create multiple distributions from the same source tree.
+        Feature names should be valid Python identifiers, except that they may
+        contain the '-' (minus) sign.  Features can be included or excluded
+        via the command line options '--with-X' and '--without-X', where 'X' is
+        the name of the feature.  Whether a feature is included by default, and
+        whether you are allowed to control this from the command line, is
+        determined by the Feature object.  See the 'Feature' class for more
+        information.
+
+     'test_suite' -- the name of a test suite to run for the 'test' command.
+        If the user runs 'python setup.py test', the package will be installed,
+        and the named test suite will be run.  The format is the same as
+        would be used on a 'unittest.py' command line.  That is, it is the
+        dotted name of an object to import and call to generate a test suite.
+
+     'package_data' -- a dictionary mapping package names to lists of filenames
+        or globs to use to find data files contained in the named packages.
+        If the dictionary has filenames or globs listed under '""' (the empty
+        string), those names will be searched for in every package, in addition
+        to any names for the specific package.  Data files found using these
+        names/globs will be installed along with the package, in the same
+        location as the package.  Note that globs are allowed to reference
+        the contents of non-package subdirectories, as long as you use '/' as
+        a path separator.  (Globs are automatically converted to
+        platform-specific paths at runtime.)
+
+    In addition to these new keywords, this class also has several new methods
+    for manipulating the distribution's contents.  For example, the 'include()'
+    and 'exclude()' methods can be thought of as in-place add and subtract
+    commands that add or remove packages, modules, extensions, and so on from
+    the distribution.  They are used by the feature subsystem to configure the
+    distribution for the included and excluded features.
+    """
+
+    _patched_dist = None
+
+    def patch_missing_pkg_info(self, attrs):
+        # Fake up a replacement for the data that would normally come from
+        # PKG-INFO, but which might not yet be built if this is a fresh
+        # checkout.
+        #
+        if not attrs or 'name' not in attrs or 'version' not in attrs:
+            return
+        key = pkg_resources.safe_name(str(attrs['name'])).lower()
+        dist = pkg_resources.working_set.by_key.get(key)
+        if dist is not None and not dist.has_metadata('PKG-INFO'):
+            dist._version = pkg_resources.safe_version(str(attrs['version']))
+            self._patched_dist = dist
+
+    def __init__ (self, attrs=None):
+        have_package_data = hasattr(self, "package_data")
+        if not have_package_data:
+            self.package_data = {}
+        self.require_features = []
+        self.features = {}
+        self.dist_files = []
+        self.src_root = attrs and attrs.pop("src_root", None)
+        self.patch_missing_pkg_info(attrs)
+        # Make sure we have any eggs needed to interpret 'attrs'
+        if attrs is not None:
+            self.dependency_links = attrs.pop('dependency_links', [])
+            assert_string_list(self,'dependency_links',self.dependency_links)
+        if attrs and 'setup_requires' in attrs:
+            self.fetch_build_eggs(attrs.pop('setup_requires'))
+        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+            if not hasattr(self,ep.name):
+                setattr(self,ep.name,None)
+        _Distribution.__init__(self,attrs)
+        if isinstance(self.metadata.version, (int,long,float)):
+            # Some people apparently take "version number" too literally :)
+            self.metadata.version = str(self.metadata.version)
+
+    def parse_command_line(self):
+        """Process features after parsing command line options"""
+        result = _Distribution.parse_command_line(self)
+        if self.features:
+            self._finalize_features()
+        return result
+
+    def _feature_attrname(self,name):
+        """Convert feature name to corresponding option attribute name"""
+        return 'with_'+name.replace('-','_')
+
+    def fetch_build_eggs(self, requires):
+        """Resolve pre-setup requirements"""
+        from pkg_resources import working_set, parse_requirements
+        for dist in working_set.resolve(
+            parse_requirements(requires), installer=self.fetch_build_egg
+        ):
+            working_set.add(dist)
+
+    def finalize_options(self):
+        _Distribution.finalize_options(self)
+        if self.features:
+            self._set_global_opts_from_features()
+
+        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+            value = getattr(self,ep.name,None)
+            if value is not None:
+                ep.require(installer=self.fetch_build_egg)
+                ep.load()(self, ep.name, value)
+        if getattr(self, 'convert_2to3_doctests', None):
+            # XXX may convert to set here when we can rely on set being builtin
+            self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
+        else:
+            self.convert_2to3_doctests = []
+
+    def fetch_build_egg(self, req):
+        """Fetch an egg needed for building"""
+
+        try:
+            cmd = self._egg_fetcher
+            cmd.package_index.to_scan = []
+        except AttributeError:
+            from setuptools.command.easy_install import easy_install
+            dist = self.__class__({'script_args':['easy_install']})
+            dist.parse_config_files()
+            opts = dist.get_option_dict('easy_install')
+            keep = (
+                'find_links', 'site_dirs', 'index_url', 'optimize',
+                'allow_hosts'
+            )
+            for key in opts.keys():
+                if key not in keep:
+                    del opts[key]   # don't use any other settings
+            if self.dependency_links:
+                links = self.dependency_links[:]
+                if 'find_links' in opts:
+                    links = opts['find_links'][1].split() + links
+                opts['find_links'] = ('setup', links)
+            cmd = easy_install(
+                dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
+                always_copy=False, build_directory=None, editable=False,
+                upgrade=False, multi_version=True, no_report=True, user=False
+            )
+            cmd.ensure_finalized()
+            self._egg_fetcher = cmd
+        return cmd.easy_install(req)
+
+    def _set_global_opts_from_features(self):
+        """Add --with-X/--without-X options based on optional features"""
+
+        go = []
+        no = self.negative_opt.copy()
+
+        for name,feature in self.features.items():
+            self._set_feature(name,None)
+            feature.validate(self)
+
+            if feature.optional:
+                descr = feature.description
+                incdef = ' (default)'
+                excdef=''
+                if not feature.include_by_default():
+                    excdef, incdef = incdef, excdef
+
+                go.append(('with-'+name, None, 'include '+descr+incdef))
+                go.append(('without-'+name, None, 'exclude '+descr+excdef))
+                no['without-'+name] = 'with-'+name
+
+        self.global_options = self.feature_options = go + self.global_options
+        self.negative_opt = self.feature_negopt = no
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def _finalize_features(self):
+        """Add/remove features and resolve dependencies between them"""
+
+        # First, flag all the enabled items (and thus their dependencies)
+        for name,feature in self.features.items():
+            enabled = self.feature_is_included(name)
+            if enabled or (enabled is None and feature.include_by_default()):
+                feature.include_in(self)
+                self._set_feature(name,1)
+
+        # Then disable the rest, so that off-by-default features don't
+        # get flagged as errors when they're required by an enabled feature
+        for name,feature in self.features.items():
+            if not self.feature_is_included(name):
+                feature.exclude_from(self)
+                self._set_feature(name,0)
+
+
+    def get_command_class(self, command):
+        """Pluggable version of get_command_class()"""
+        if command in self.cmdclass:
+            return self.cmdclass[command]
+
+        for ep in pkg_resources.iter_entry_points('distutils.commands',command):
+            ep.require(installer=self.fetch_build_egg)
+            self.cmdclass[command] = cmdclass = ep.load()
+            return cmdclass
+        else:
+            return _Distribution.get_command_class(self, command)
+
+    def print_commands(self):
+        for ep in pkg_resources.iter_entry_points('distutils.commands'):
+            if ep.name not in self.cmdclass:
+                cmdclass = ep.load(False) # don't require extras, we're not running
+                self.cmdclass[ep.name] = cmdclass
+        return _Distribution.print_commands(self)
+
+
+
+
+
+    def _set_feature(self,name,status):
+        """Set feature's inclusion status"""
+        setattr(self,self._feature_attrname(name),status)
+
+    def feature_is_included(self,name):
+        """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
+        return getattr(self,self._feature_attrname(name))
+
+    def include_feature(self,name):
+        """Request inclusion of feature named 'name'"""
+
+        if self.feature_is_included(name)==0:
+            descr = self.features[name].description
+            raise DistutilsOptionError(
+               descr + " is required, but was excluded or is not available"
+           )
+        self.features[name].include_in(self)
+        self._set_feature(name,1)
+
+    def include(self,**attrs):
+        """Add items to distribution that are named in keyword arguments
+
+        For example, 'dist.include(py_modules=["x"])' would add 'x' to
+        the distribution's 'py_modules' attribute, if it was not already
+        there.
+
+        Currently, this method only supports inclusion for attributes that are
+        lists or tuples.  If you need to add support for adding to other
+        attributes in this or a subclass, you can add an '_include_X' method,
+        where 'X' is the name of the attribute.  The method will be called with
+        the value passed to 'include()'.  So, 'dist.include(foo={"bar":"baz"})'
+        will try to call 'dist._include_foo({"bar":"baz"})', which can then
+        handle whatever special inclusion logic is needed.
+        """
+        for k,v in attrs.items():
+            include = getattr(self, '_include_'+k, None)
+            if include:
+                include(v)
+            else:
+                self._include_misc(k,v)
+
+    def exclude_package(self,package):
+        """Remove packages, modules, and extensions in named package"""
+
+        pfx = package+'.'
+        if self.packages:
+            self.packages = [
+                p for p in self.packages
+                    if p != package and not p.startswith(pfx)
+            ]
+
+        if self.py_modules:
+            self.py_modules = [
+                p for p in self.py_modules
+                    if p != package and not p.startswith(pfx)
+            ]
+
+        if self.ext_modules:
+            self.ext_modules = [
+                p for p in self.ext_modules
+                    if p.name != package and not p.name.startswith(pfx)
+            ]
+
+
+    def has_contents_for(self,package):
+        """Return true if 'exclude_package(package)' would do something"""
+
+        pfx = package+'.'
+
+        for p in self.iter_distribution_names():
+            if p==package or p.startswith(pfx):
+                return True
+
+
+
+
+
+
+
+
+
+
+    def _exclude_misc(self,name,value):
+        """Handle 'exclude()' for list/tuple attrs without a special handler"""
+        if not isinstance(value,sequence):
+            raise DistutilsSetupError(
+                "%s: setting must be a list or tuple (%r)" % (name, value)
+            )
+        try:
+            old = getattr(self,name)
+        except AttributeError:
+            raise DistutilsSetupError(
+                "%s: No such distribution setting" % name
+            )
+        if old is not None and not isinstance(old,sequence):
+            raise DistutilsSetupError(
+                name+": this setting cannot be changed via include/exclude"
+            )
+        elif old:
+            setattr(self,name,[item for item in old if item not in value])
+
+    def _include_misc(self,name,value):
+        """Handle 'include()' for list/tuple attrs without a special handler"""
+
+        if not isinstance(value,sequence):
+            raise DistutilsSetupError(
+                "%s: setting must be a list (%r)" % (name, value)
+            )
+        try:
+            old = getattr(self,name)
+        except AttributeError:
+            raise DistutilsSetupError(
+                "%s: No such distribution setting" % name
+            )
+        if old is None:
+            setattr(self,name,value)
+        elif not isinstance(old,sequence):
+            raise DistutilsSetupError(
+                name+": this setting cannot be changed via include/exclude"
+            )
+        else:
+            setattr(self,name,old+[item for item in value if item not in old])
+
+    def exclude(self,**attrs):
+        """Remove items from distribution that are named in keyword arguments
+
+        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
+        the distribution's 'py_modules' attribute.  Excluding packages uses
+        the 'exclude_package()' method, so all of the package's contained
+        packages, modules, and extensions are also excluded.
+
+        Currently, this method only supports exclusion from attributes that are
+        lists or tuples.  If you need to add support for excluding from other
+        attributes in this or a subclass, you can add an '_exclude_X' method,
+        where 'X' is the name of the attribute.  The method will be called with
+        the value passed to 'exclude()'.  So, 'dist.exclude(foo={"bar":"baz"})'
+        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
+        handle whatever special exclusion logic is needed.
+        """
+        for k,v in attrs.items():
+            exclude = getattr(self, '_exclude_'+k, None)
+            if exclude:
+                exclude(v)
+            else:
+                self._exclude_misc(k,v)
+
+    def _exclude_packages(self,packages):
+        if not isinstance(packages,sequence):
+            raise DistutilsSetupError(
+                "packages: setting must be a list or tuple (%r)" % (packages,)
+            )
+        map(self.exclude_package, packages)
+
+
+
+
+
+
+
+
+
+
+
+
+    def _parse_command_opts(self, parser, args):
+        # Remove --with-X/--without-X options when processing command args
+        self.global_options = self.__class__.global_options
+        self.negative_opt = self.__class__.negative_opt
+
+        # First, expand any aliases
+        command = args[0]
+        aliases = self.get_option_dict('aliases')
+        while command in aliases:
+            src,alias = aliases[command]
+            del aliases[command]    # ensure each alias can expand only once!
+            import shlex
+            args[:1] = shlex.split(alias,True)
+            command = args[0]
+
+        nargs = _Distribution._parse_command_opts(self, parser, args)
+
+        # Handle commands that want to consume all remaining arguments
+        cmd_class = self.get_command_class(command)
+        if getattr(cmd_class,'command_consumes_arguments',None):
+            self.get_option_dict(command)['args'] = ("command line", nargs)
+            if nargs is not None:
+                return []
+
+        return nargs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def get_cmdline_options(self):
+        """Return a '{cmd: {opt:val}}' map of all command-line options
+
+        Option names are all long, but do not include the leading '--', and
+        contain dashes rather than underscores.  If the option doesn't take
+        an argument (e.g. '--quiet'), the 'val' is 'None'.
+
+        Note that options provided by config files are intentionally excluded.
+        """
+
+        d = {}
+
+        for cmd,opts in self.command_options.items():
+
+            for opt,(src,val) in opts.items():
+
+                if src != "command line":
+                    continue
+
+                opt = opt.replace('_','-')
+
+                if val==0:
+                    cmdobj = self.get_command_obj(cmd)
+                    neg_opt = self.negative_opt.copy()
+                    neg_opt.update(getattr(cmdobj,'negative_opt',{}))
+                    for neg,pos in neg_opt.items():
+                        if pos==opt:
+                            opt=neg
+                            val=None
+                            break
+                    else:
+                        raise AssertionError("Shouldn't be able to get here")
+
+                elif val==1:
+                    val = None
+
+                d.setdefault(cmd,{})[opt] = val
+
+        return d
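+    # For example, running 'setup.py build_ext --inplace' would yield roughly
+    # {'build_ext': {'inplace': None}}; options set in setup.cfg never appear.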
+
+
+    def iter_distribution_names(self):
+        """Yield all packages, modules, and extension names in distribution"""
+
+        for pkg in self.packages or ():
+            yield pkg
+
+        for module in self.py_modules or ():
+            yield module
+
+        for ext in self.ext_modules or ():
+            if isinstance(ext,tuple):
+                name, buildinfo = ext
+            else:
+                name = ext.name
+            if name.endswith('module'):
+                name = name[:-6]
+            yield name
+
+
+    def handle_display_options(self, option_order):
+        """If there were any non-global "display-only" options
+        (--help-commands or the metadata display options) on the command
+        line, display the requested info and return true; else return
+        false.
+        """
+        import sys
+
+        if sys.version_info < (3,) or self.help_commands:
+            return _Distribution.handle_display_options(self, option_order)
+
+        # Stdout may be StringIO (e.g. in tests)
+        import io
+        if not isinstance(sys.stdout, io.TextIOWrapper):
+            return _Distribution.handle_display_options(self, option_order)
+
+        # Don't wrap stdout if utf-8 is already the encoding. Provides
+        #  workaround for #334.
+        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
+            return _Distribution.handle_display_options(self, option_order)
+
+        # Print metadata in UTF-8 no matter the platform
+        encoding = sys.stdout.encoding
+        errors = sys.stdout.errors
+        newline = sys.platform != 'win32' and '\n' or None
+        line_buffering = sys.stdout.line_buffering
+
+        sys.stdout = io.TextIOWrapper(
+            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
+        try:
+            return _Distribution.handle_display_options(self, option_order)
+        finally:
+            sys.stdout = io.TextIOWrapper(
+                sys.stdout.detach(), encoding, errors, newline, line_buffering)
+
+
+# Install it throughout the distutils
+for module in distutils.dist, distutils.core, distutils.cmd:
+    module.Distribution = Distribution
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class Feature:
+    """A subset of the distribution that can be excluded if unneeded/wanted
+
+    Features are created using these keyword arguments:
+
+      'description' -- a short, human readable description of the feature, to
+         be used in error messages, and option help messages.
+
+      'standard' -- if true, the feature is included by default if it is
+         available on the current system.  Otherwise, the feature is only
+         included if requested via a command line '--with-X' option, or if
+         another included feature requires it.  The default setting is 'False'.
+
+      'available' -- if true, the feature is available for installation on the
+         current system.  The default setting is 'True'.
+
+      'optional' -- if true, the feature's inclusion can be controlled from the
+         command line, using the '--with-X' or '--without-X' options.  If
+         false, the feature's inclusion status is determined automatically,
+         based on 'available', 'standard', and whether any other feature
+         requires it.  The default setting is 'True'.
+
+      'require_features' -- a string or sequence of strings naming features
+         that should also be included if this feature is included.  Defaults to
+         empty list.  May also contain 'Require' objects that should be
+         added/removed from the distribution.
+
+      'remove' -- a string or list of strings naming packages to be removed
+         from the distribution if this feature is *not* included.  If the
+         feature *is* included, this argument is ignored.  This argument exists
+         to support removing features that "crosscut" a distribution, such as
+         defining a 'tests' feature that removes all the 'tests' subpackages
+         provided by other features.  The default for this argument is an empty
+         list.  (Note: the named package(s) or modules must exist in the base
+         distribution when the 'setup()' function is initially called.)
+
+      other keywords -- any other keyword arguments are saved, and passed to
+         the distribution's 'include()' and 'exclude()' methods when the
+         feature is included or excluded, respectively.  So, for example, you
+         could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
+         added or removed from the distribution as appropriate.
+
+    A feature must include at least one 'require_features', 'remove', or other
+    keyword argument.  Otherwise, it can't affect the distribution in any way.
+    Note also that you can subclass 'Feature' to create your own specialized
+    feature types that modify the distribution in other ways when included or
+    excluded.  See the docstrings for the various methods here for more detail.
+    Aside from the methods, the only feature attributes that distributions look
+    at are 'description' and 'optional'.
+    """
+    def __init__(self, description, standard=False, available=True,
+        optional=True, require_features=(), remove=(), **extras
+    ):
+
+        self.description = description
+        self.standard = standard
+        self.available = available
+        self.optional = optional
+        if isinstance(require_features,(str,Require)):
+            require_features = require_features,
+
+        self.require_features = [
+            r for r in require_features if isinstance(r,str)
+        ]
+        er = [r for r in require_features if not isinstance(r,str)]
+        if er: extras['require_features'] = er
+
+        if isinstance(remove,str):
+            remove = remove,
+        self.remove = remove
+        self.extras = extras
+
+        if not remove and not require_features and not extras:
+            raise DistutilsSetupError(
+                "Feature %s: must define 'require_features', 'remove', or at least one"
+                " of 'packages', 'py_modules', etc."
+            )
+
+    def include_by_default(self):
+        """Should this feature be included by default?"""
+        return self.available and self.standard
+
+    def include_in(self,dist):
+
+        """Ensure feature and its requirements are included in distribution
+
+        You may override this in a subclass to perform additional operations on
+        the distribution.  Note that this method may be called more than once
+        per feature, and so should be idempotent.
+
+        """
+
+        if not self.available:
+            raise DistutilsPlatformError(
+                self.description+" is required,"
+                "but is not available on this platform"
+            )
+
+        dist.include(**self.extras)
+
+        for f in self.require_features:
+            dist.include_feature(f)
+
+
+
+    def exclude_from(self,dist):
+
+        """Ensure feature is excluded from distribution
+
+        You may override this in a subclass to perform additional operations on
+        the distribution.  This method will be called at most once per
+        feature, and only after all included features have been asked to
+        include themselves.
+        """
+
+        dist.exclude(**self.extras)
+
+        if self.remove:
+            for item in self.remove:
+                dist.exclude_package(item)
+
+
+
+    def validate(self,dist):
+
+        """Verify that feature makes sense in context of distribution
+
+        This method is called by the distribution just before it parses its
+        command line.  It checks to ensure that the 'remove' attribute, if any,
+        contains only valid package/module names that are present in the base
+        distribution when 'setup()' is called.  You may override it in a
+        subclass to perform any other required validation of the feature
+        against a target distribution.
+        """
+
+        for item in self.remove:
+            if not dist.has_contents_for(item):
+                raise DistutilsSetupError(
+                    "%s wants to be able to remove %s, but the distribution"
+                    " doesn't contain any packages or modules under %s"
+                    % (self.description, item, item)
+                )
+
+
+
+def check_packages(dist, attr, value):
+    for pkgname in value:
+        if not re.match(r'\w+(\.\w+)*', pkgname):
+            distutils.log.warn(
+                "WARNING: %r not a valid package name; please use only"
+                ".-separated package names in setup.py", pkgname
+            )
+
diff --git a/vendor/distribute-0.6.35/setuptools/extension.py b/vendor/distribute-0.6.35/setuptools/extension.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb8b836cc316ffaccfd64cef48a50e36b5a674e9
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/extension.py
@@ -0,0 +1,46 @@
+import sys
+import distutils.core
+import distutils.extension
+
+from setuptools.dist import _get_unpatched
+
+_Extension = _get_unpatched(distutils.core.Extension)
+
+def have_pyrex():
+    """
+    Return True if Cython or Pyrex can be imported.
+    """
+    pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext'
+    for pyrex_impl in pyrex_impls:
+        try:
+            # from (pyrex_impl) import build_ext
+            __import__(pyrex_impl, fromlist=['build_ext']).build_ext
+            return True
+        except Exception:
+            pass
+    return False
+
+
+class Extension(_Extension):
+    """Extension that uses '.c' files in place of '.pyx' files"""
+
+    def __init__(self, *args, **kw):
+        _Extension.__init__(self, *args, **kw)
+        if not have_pyrex():
+            self._convert_pyx_sources_to_c()
+
+    def _convert_pyx_sources_to_c(self):
+        "convert .pyx extensions to .c"
+        def pyx_to_c(source):
+            if source.endswith('.pyx'):
+                source = source[:-4] + '.c'
+            return source
+        self.sources = map(pyx_to_c, self.sources)
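+
+    # e.g. with neither Cython nor Pyrex importable, a (hypothetical)
+    # Extension('pkg.mod', ['pkg/mod.pyx']) ends up with sources ['pkg/mod.c'].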
+
+class Library(Extension):
+    """Just like a regular Extension, but built as a library instead"""
+
+distutils.core.Extension = Extension
+distutils.extension.Extension = Extension
+if 'distutils.command.build_ext' in sys.modules:
+    sys.modules['distutils.command.build_ext'].Extension = Extension
diff --git a/vendor/distribute-0.6.35/setuptools/gui-32.exe b/vendor/distribute-0.6.35/setuptools/gui-32.exe
new file mode 100755
index 0000000000000000000000000000000000000000..3f64af7de42fd6597b4c6cf50896d32a98a7d6a2
Binary files /dev/null and b/vendor/distribute-0.6.35/setuptools/gui-32.exe differ
diff --git a/vendor/distribute-0.6.35/setuptools/gui-64.exe b/vendor/distribute-0.6.35/setuptools/gui-64.exe
new file mode 100755
index 0000000000000000000000000000000000000000..3ab4378e1d401d198b92a33ce249d29bcdb26a63
Binary files /dev/null and b/vendor/distribute-0.6.35/setuptools/gui-64.exe differ
diff --git a/vendor/distribute-0.6.35/setuptools/gui.exe b/vendor/distribute-0.6.35/setuptools/gui.exe
new file mode 100755
index 0000000000000000000000000000000000000000..3f64af7de42fd6597b4c6cf50896d32a98a7d6a2
Binary files /dev/null and b/vendor/distribute-0.6.35/setuptools/gui.exe differ
diff --git a/vendor/distribute-0.6.35/setuptools/package_index.py b/vendor/distribute-0.6.35/setuptools/package_index.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ee21e3b7bbbd045f79dc382a09557a42ca17cb7
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/package_index.py
@@ -0,0 +1,920 @@
+"""PyPI and direct package downloading"""
+import sys, os.path, re, urlparse, urllib, urllib2, shutil, random, socket, cStringIO
+import base64
+import httplib
+from pkg_resources import *
+from distutils import log
+from distutils.errors import DistutilsError
+try:
+    from hashlib import md5
+except ImportError:
+    from md5 import md5
+from fnmatch import translate
+
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+# this is here to fix emacs' cruddy broken syntax highlighting
+PYPI_MD5 = re.compile(
+    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
+    'href="[^?]+\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
+)
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
+
+__all__ = [
+    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
+    'interpret_distro_name',
+]
+
+_SOCKET_TIMEOUT = 15
+
+def parse_bdist_wininst(name):
+    """Return (base,pyversion) or (None,None) for possible .exe name"""
+
+    lower = name.lower()
+    base, py_ver, plat = None, None, None
+
+    if lower.endswith('.exe'):
+        if lower.endswith('.win32.exe'):
+            base = name[:-10]
+            plat = 'win32'
+        elif lower.startswith('.win32-py',-16):
+            py_ver = name[-7:-4]
+            base = name[:-16]
+            plat = 'win32'
+        elif lower.endswith('.win-amd64.exe'):
+            base = name[:-14]
+            plat = 'win-amd64'
+        elif lower.startswith('.win-amd64-py',-20):
+            py_ver = name[-7:-4]
+            base = name[:-20]
+            plat = 'win-amd64'
+    return base,py_ver,plat
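+# A few illustrative (hypothetical) filenames::
+#
+#     parse_bdist_wininst('foo-1.0.win32.exe')        -> ('foo-1.0', None, 'win32')
+#     parse_bdist_wininst('foo-1.0.win32-py2.7.exe')  -> ('foo-1.0', '2.7', 'win32')
+#     parse_bdist_wininst('foo-1.0.tar.gz')           -> (None, None, None)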
+
+
+def egg_info_for_url(url):
+    scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
+    base = urllib2.unquote(path.split('/')[-1])
+    if '#' in base: base, fragment = base.split('#',1)
+    return base,fragment
+
+def distros_for_url(url, metadata=None):
+    """Yield egg or source distribution objects that might be found at a URL"""
+    base, fragment = egg_info_for_url(url)
+    for dist in distros_for_location(url, base, metadata): yield dist
+    if fragment:
+        match = EGG_FRAGMENT.match(fragment)
+        if match:
+            for dist in interpret_distro_name(
+                url, match.group(1), metadata, precedence = CHECKOUT_DIST
+            ):
+                yield dist
+
+def distros_for_location(location, basename, metadata=None):
+    """Yield egg or source distribution objects based on basename"""
+    if basename.endswith('.egg.zip'):
+        basename = basename[:-4]    # strip the .zip
+    if basename.endswith('.egg') and '-' in basename:
+        # only one, unambiguous interpretation
+        return [Distribution.from_location(location, basename, metadata)]
+
+    if basename.endswith('.exe'):
+        win_base, py_ver, platform = parse_bdist_wininst(basename)
+        if win_base is not None:
+            return interpret_distro_name(
+                location, win_base, metadata, py_ver, BINARY_DIST, platform
+            )
+
+    # Try source distro extensions (.zip, .tgz, etc.)
+    #
+    for ext in EXTENSIONS:
+        if basename.endswith(ext):
+            basename = basename[:-len(ext)]
+            return interpret_distro_name(location, basename, metadata)
+    return []  # no extension matched
+
+def distros_for_filename(filename, metadata=None):
+    """Yield possible egg or source distribution objects based on a filename"""
+    return distros_for_location(
+        normalize_path(filename), os.path.basename(filename), metadata
+    )
+
+
+def interpret_distro_name(location, basename, metadata,
+    py_version=None, precedence=SOURCE_DIST, platform=None
+):
+    """Generate alternative interpretations of a source distro name
+
+    Note: if `location` is a filesystem filename, you should call
+    ``pkg_resources.normalize_path()`` on it before passing it to this
+    routine!
+    """
+    # Generate alternative interpretations of a source distro name
+    # Because some packages are ambiguous as to name/versions split
+    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
+    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
+    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version").  In practice,
+    # the spurious interpretations should be ignored, because in the event
+    # there's also an "adns" package, the spurious "python-1.1.0" version will
+    # compare lower than any numeric version number, and is therefore unlikely
+    # to match a request for it.  It's still a potential problem, though, and
+    # in the long run PyPI and the distutils should go for "safe" names and
+    # versions in distribution archive names (sdist and bdist).
+
+    parts = basename.split('-')
+    if not py_version:
+        for i,p in enumerate(parts[2:]):
+            if len(p)==5 and p.startswith('py2.'):
+                return # It's a bdist_dumb, not an sdist -- bail out
+
+    for p in range(1,len(parts)+1):
+        yield Distribution(
+            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
+            py_version=py_version, precedence = precedence,
+            platform = platform
+        )
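+# For a basename like "adns-python-1.1.0", the (project, version) candidates
+# generated above are roughly ('adns', 'python-1.1.0'), ('adns-python',
+# '1.1.0') and ('adns-python-1.1.0', '').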
+
+REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+# this line is here to fix emacs' cruddy broken syntax highlighting
+
+def find_external_links(url, page):
+    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
+
+    for match in REL.finditer(page):
+        tag, rel = match.groups()
+        rels = map(str.strip, rel.lower().split(','))
+        if 'homepage' in rels or 'download' in rels:
+            for match in HREF.finditer(tag):
+                yield urlparse.urljoin(url, htmldecode(match.group(1)))
+
+    for tag in ("<th>Home Page", "<th>Download URL"):
+        pos = page.find(tag)
+        if pos!=-1:
+            match = HREF.search(page,pos)
+            if match:
+                yield urlparse.urljoin(url, htmldecode(match.group(1)))
+
+user_agent = "Python-urllib/%s distribute/%s" % (
+    sys.version[:3], require('distribute')[0].version
+)
+
+
+class PackageIndex(Environment):
+    """A distribution index that scans web pages for download URLs"""
+
+    def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',),
+        *args, **kw
+    ):
+        Environment.__init__(self,*args,**kw)
+        self.index_url = index_url + "/"[:not index_url.endswith('/')]
+        self.scanned_urls = {}
+        self.fetched_urls = {}
+        self.package_pages = {}
+        self.allows = re.compile('|'.join(map(translate,hosts))).match
+        self.to_scan = []
+
+
+
+    def process_url(self, url, retrieve=False):
+        """Evaluate a URL as a possible download, and maybe retrieve it"""
+        if url in self.scanned_urls and not retrieve:
+            return
+        self.scanned_urls[url] = True
+        if not URL_SCHEME(url):
+            self.process_filename(url)
+            return
+        else:
+            dists = list(distros_for_url(url))
+            if dists:
+                if not self.url_ok(url):
+                    return
+                self.debug("Found link: %s", url)
+
+        if dists or not retrieve or url in self.fetched_urls:
+            map(self.add, dists)
+            return  # don't need the actual page
+
+        if not self.url_ok(url):
+            self.fetched_urls[url] = True
+            return
+
+        self.info("Reading %s", url)
+        f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url)
+        if f is None: return
+        self.fetched_urls[url] = self.fetched_urls[f.url] = True
+
+        if 'html' not in f.headers.get('content-type', '').lower():
+            f.close()   # not html, we can't process it
+            return
+
+        base = f.url     # handle redirects
+        page = f.read()
+        if not isinstance(page, str): # We are in Python 3 and got bytes. We want str.
+            if isinstance(f, urllib2.HTTPError):
+                # Errors have no charset, assume latin1:
+                charset = 'latin-1'
+            else:
+                charset = f.headers.get_param('charset') or 'latin-1'
+            page = page.decode(charset, "ignore")
+        f.close()
+        for match in HREF.finditer(page):
+            link = urlparse.urljoin(base, htmldecode(match.group(1)))
+            self.process_url(link)
+        if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
+            page = self.process_index(url, page)
+
+    def process_filename(self, fn, nested=False):
+        # process filenames or directories
+        if not os.path.exists(fn):
+            self.warn("Not found: %s", fn)
+            return
+
+        if os.path.isdir(fn) and not nested:
+            path = os.path.realpath(fn)
+            for item in os.listdir(path):
+                self.process_filename(os.path.join(path,item), True)
+
+        dists = distros_for_filename(fn)
+        if dists:
+            self.debug("Found: %s", fn)
+            map(self.add, dists)
+
+    def url_ok(self, url, fatal=False):
+        s = URL_SCHEME(url)
+        if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]):
+            return True
+        msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
+        if fatal:
+            raise DistutilsError(msg % url)
+        else:
+            self.warn(msg, url)
+
+    def scan_egg_links(self, search_path):
+        for item in search_path:
+            if os.path.isdir(item):
+                for entry in os.listdir(item):
+                    if entry.endswith('.egg-link'):
+                        self.scan_egg_link(item, entry)
+
+    def scan_egg_link(self, path, entry):
+        lines = filter(None, map(str.strip, open(os.path.join(path, entry))))
+        if len(lines)==2:
+            for dist in find_distributions(os.path.join(path, lines[0])):
+                dist.location = os.path.join(path, *lines)
+                dist.precedence = SOURCE_DIST
+                self.add(dist)
+
+    def process_index(self,url,page):
+        """Process the contents of a PyPI page"""
+        def scan(link):
+            # Process a URL to see if it's for a package page
+            if link.startswith(self.index_url):
+                parts = map(
+                    urllib2.unquote, link[len(self.index_url):].split('/')
+                )
+                if len(parts)==2 and '#' not in parts[1]:
+                    # it's a package page, sanitize and index it
+                    pkg = safe_name(parts[0])
+                    ver = safe_version(parts[1])
+                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
+                    return to_filename(pkg), to_filename(ver)
+            return None, None
+
+        # process an index page into the package-page index
+        for match in HREF.finditer(page):
+            try:
+                scan( urlparse.urljoin(url, htmldecode(match.group(1))) )
+            except ValueError:
+                pass
+
+        pkg, ver = scan(url)   # ensure this page is in the page index
+        if pkg:
+            # process individual package page
+            for new_url in find_external_links(url, page):
+                # Process the found URL
+                base, frag = egg_info_for_url(new_url)
+                if base.endswith('.py') and not frag:
+                    if ver:
+                        new_url+='#egg=%s-%s' % (pkg,ver)
+                    else:
+                        self.need_version_info(url)
+                self.scan_url(new_url)
+
+            return PYPI_MD5.sub(
+                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
+            )
+        else:
+            return ""   # no sense double-scanning non-package pages
+
+
+
+    def need_version_info(self, url):
+        self.scan_all(
+            "Page at %s links to .py file(s) without version info; an index "
+            "scan is required.", url
+        )
+
+    def scan_all(self, msg=None, *args):
+        if self.index_url not in self.fetched_urls:
+            if msg: self.warn(msg,*args)
+            self.info(
+                "Scanning index of all packages (this may take a while)"
+            )
+        self.scan_url(self.index_url)
+
+    def find_packages(self, requirement):
+        self.scan_url(self.index_url + requirement.unsafe_name+'/')
+
+        if not self.package_pages.get(requirement.key):
+            # Fall back to safe version of the name
+            self.scan_url(self.index_url + requirement.project_name+'/')
+
+        if not self.package_pages.get(requirement.key):
+            # We couldn't find the target package, so search the index page too
+            self.not_found_in_index(requirement)
+
+        for url in list(self.package_pages.get(requirement.key,())):
+            # scan each page that might be related to the desired package
+            self.scan_url(url)
+
+    def obtain(self, requirement, installer=None):
+        self.prescan(); self.find_packages(requirement)
+        for dist in self[requirement.key]:
+            if dist in requirement:
+                return dist
+            self.debug("%s does not match %s", requirement, dist)
+        return super(PackageIndex, self).obtain(requirement,installer)
+
+
+
+
+
+    def check_md5(self, cs, info, filename, tfp):
+        if re.match('md5=[0-9a-f]{32}$', info):
+            self.debug("Validating md5 checksum for %s", filename)
+            if cs.hexdigest()<>info[4:]:
+                tfp.close()
+                os.unlink(filename)
+                raise DistutilsError(
+                    "MD5 validation failed for "+os.path.basename(filename)+
+                    "; possible download problem?"
+                )
+
+    def add_find_links(self, urls):
+        """Add `urls` to the list that will be prescanned for searches"""
+        for url in urls:
+            if (
+                self.to_scan is None        # if we have already "gone online"
+                or not URL_SCHEME(url)      # or it's a local file/directory
+                or url.startswith('file:')
+                or list(distros_for_url(url))   # or a direct package link
+            ):
+                # then go ahead and process it now
+                self.scan_url(url)
+            else:
+                # otherwise, defer retrieval till later
+                self.to_scan.append(url)
+
+    def prescan(self):
+        """Scan urls scheduled for prescanning (e.g. --find-links)"""
+        if self.to_scan:
+            map(self.scan_url, self.to_scan)
+        self.to_scan = None     # from now on, go ahead and process immediately
+
+    def not_found_in_index(self, requirement):
+        if self[requirement.key]:   # we've seen at least one distro
+            meth, msg = self.info, "Couldn't retrieve index page for %r"
+        else:   # no distros seen for this name, might be misspelled
+            meth, msg = (self.warn,
+                "Couldn't find index page for %r (maybe misspelled?)")
+        meth(msg, requirement.unsafe_name)
+        self.scan_all()
+
+    def download(self, spec, tmpdir):
+        """Locate and/or download `spec` to `tmpdir`, returning a local path
+
+        `spec` may be a ``Requirement`` object, or a string containing a URL,
+        an existing local filename, or a project/version requirement spec
+        (i.e. the string form of a ``Requirement`` object).  If it is the URL
+        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
+        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
+        automatically created alongside the downloaded file.
+
+        If `spec` is a ``Requirement`` object or a string containing a
+        project/version requirement spec, this method returns the location of
+        a matching distribution (possibly after downloading it to `tmpdir`).
+        If `spec` is a locally existing file or directory name, it is simply
+        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
+        of `tmpdir`, and the local filename is returned.  Various errors may be
+        raised if a problem occurs during downloading.
+        """
+        if not isinstance(spec,Requirement):
+            scheme = URL_SCHEME(spec)
+            if scheme:
+                # It's a url, download it to tmpdir
+                found = self._download_url(scheme.group(1), spec, tmpdir)
+                base, fragment = egg_info_for_url(spec)
+                if base.endswith('.py'):
+                    found = self.gen_setup(found,fragment,tmpdir)
+                return found
+            elif os.path.exists(spec):
+                # Existing file or directory, just return it
+                return spec
+            else:
+                try:
+                    spec = Requirement.parse(spec)
+                except ValueError:
+                    raise DistutilsError(
+                        "Not a URL, existing file, or requirement spec: %r" %
+                        (spec,)
+                    )
+        return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
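+
+    # Typical calls (URL and spec are hypothetical)::
+    #
+    #     pi = PackageIndex()
+    #     pi.download('http://example.com/foo-1.0.tar.gz', tmpdir)  # fetch URL
+    #     pi.download('foo>=1.0', tmpdir)   # resolve via the index, then fetch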
+
+
+    def fetch_distribution(self,
+        requirement, tmpdir, force_scan=False, source=False, develop_ok=False,
+        local_index=None
+    ):
+        """Obtain a distribution suitable for fulfilling `requirement`
+
+        `requirement` must be a ``pkg_resources.Requirement`` instance.
+        If necessary, or if the `force_scan` flag is set, the requirement is
+        searched for in the (online) package index as well as the locally
+        installed packages.  If a distribution matching `requirement` is found,
+        the returned distribution's ``location`` is the value you would have
+        gotten from calling the ``download()`` method with the matching
+        distribution's URL or filename.  If no matching distribution is found,
+        ``None`` is returned.
+
+        If the `source` flag is set, only source distributions and source
+        checkout links will be considered.  Unless the `develop_ok` flag is
+        set, development and system eggs (i.e., those using the ``.egg-info``
+        format) will be ignored.
+        """
+
+        # process a Requirement
+        self.info("Searching for %s", requirement)
+        skipped = {}
+        dist = None
+
+        def find(req, env=None):
+            if env is None:
+                env = self
+            # Find a matching distribution; may be called more than once
+
+            for dist in env[req.key]:
+
+                if dist.precedence==DEVELOP_DIST and not develop_ok:
+                    if dist not in skipped:
+                        self.warn("Skipping development or system egg: %s",dist)
+                        skipped[dist] = 1
+                    continue
+
+                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
+                    self.info("Best match: %s", dist)
+                    return dist.clone(
+                        location=self.download(dist.location, tmpdir)
+                    )
+
+        if force_scan:
+            self.prescan()
+            self.find_packages(requirement)
+            dist = find(requirement)
+
+        if local_index is not None:
+            dist = dist or find(requirement, local_index)
+
+        if dist is None and self.to_scan is not None:
+            self.prescan()
+            dist = find(requirement)
+
+        if dist is None and not force_scan:
+            self.find_packages(requirement)
+            dist = find(requirement)
+
+        if dist is None:
+            self.warn(
+                "No local packages or download links found for %s%s",
+                (source and "a source distribution of " or ""),
+                requirement,
+            )
+        return dist
+
+    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
+        """Obtain a file suitable for fulfilling `requirement`
+
+        DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
+        backward compatibility, this routine is identical but returns the
+        ``location`` of the downloaded distribution instead of a distribution
+        object.
+        """
+        dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
+        if dist is not None:
+            return dist.location
+        return None
+
+
+
+
+
+
+
+
+    def gen_setup(self, filename, fragment, tmpdir):
+        match = EGG_FRAGMENT.match(fragment)
+        dists = match and [d for d in
+            interpret_distro_name(filename, match.group(1), None) if d.version
+        ] or []
+
+        if len(dists)==1:   # unambiguous ``#egg`` fragment
+            basename = os.path.basename(filename)
+
+            # Make sure the file has been downloaded to the temp dir.
+            if os.path.dirname(filename) != tmpdir:
+                dst = os.path.join(tmpdir, basename)
+                from setuptools.command.easy_install import samefile
+                if not samefile(filename, dst):
+                    shutil.copy2(filename, dst)
+                    filename=dst
+
+            file = open(os.path.join(tmpdir, 'setup.py'), 'w')
+            file.write(
+                "from setuptools import setup\n"
+                "setup(name=%r, version=%r, py_modules=[%r])\n"
+                % (
+                    dists[0].project_name, dists[0].version,
+                    os.path.splitext(basename)[0]
+                )
+            )
+            file.close()
+            return filename
+
+        elif match:
+            raise DistutilsError(
+                "Can't unambiguously interpret project/version identifier %r; "
+                "any dashes in the name or version should be escaped using "
+                "underscores. %r" % (fragment,dists)
+            )
+        else:
+            raise DistutilsError(
+                "Can't process plain .py files without an '#egg=name-version'"
+                " suffix to enable automatic setup script generation."
+            )
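+
+    # Illustrative note (added comment, not part of the original source): for
+    # a plain-module link such as http://example.com/foo.py#egg=Foo-1.0 (a
+    # hypothetical URL), gen_setup() writes a shim setup script equivalent to
+    #
+    #   from setuptools import setup
+    #   setup(name='Foo', version='1.0', py_modules=['foo'])
+    #
+    # Dashes inside the project name or version must be escaped as
+    # underscores so the name-version split of the fragment stays unambiguous.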
+
+    dl_blocksize = 8192
+    def _download_to(self, url, filename):
+        self.info("Downloading %s", url)
+        # Download the file
+        fp, tfp, info = None, None, None
+        try:
+            if '#' in url:
+                url, info = url.split('#', 1)
+            fp = self.open_url(url)
+            if isinstance(fp, urllib2.HTTPError):
+                raise DistutilsError(
+                    "Can't download %s: %s %s" % (url, fp.code,fp.msg)
+                )
+            cs = md5()
+            headers = fp.info()
+            blocknum = 0
+            bs = self.dl_blocksize
+            size = -1
+            if "content-length" in headers:
+                # Some servers return multiple Content-Length headers :(
+                content_length = headers.get("Content-Length")
+                size = int(content_length)
+                self.reporthook(url, filename, blocknum, bs, size)
+            tfp = open(filename,'wb')
+            while True:
+                block = fp.read(bs)
+                if block:
+                    cs.update(block)
+                    tfp.write(block)
+                    blocknum += 1
+                    self.reporthook(url, filename, blocknum, bs, size)
+                else:
+                    break
+            if info: self.check_md5(cs, info, filename, tfp)
+            return headers
+        finally:
+            if fp: fp.close()
+            if tfp: tfp.close()
+
+    def reporthook(self, url, filename, blocknum, blksize, size):
+        pass    # no-op
+
+
+    def open_url(self, url, warning=None):
+        if url.startswith('file:'):
+            return local_open(url)
+        try:
+            return open_with_auth(url)
+        except (ValueError, httplib.InvalidURL), v:
+            msg = ' '.join([str(arg) for arg in v.args])
+            if warning:
+                self.warn(warning, msg)
+            else:
+                raise DistutilsError('%s %s' % (url, msg))
+        except urllib2.HTTPError, v:
+            return v
+        except urllib2.URLError, v:
+            if warning:
+                self.warn(warning, v.reason)
+            else:
+                raise DistutilsError("Download error for %s: %s"
+                                     % (url, v.reason))
+        except httplib.BadStatusLine, v:
+            if warning:
+                self.warn(warning, v.line)
+            else:
+                raise DistutilsError('%s returned a bad status line. '
+                                     'The server might be down, %s' % \
+                                             (url, v.line))
+        except httplib.HTTPException, v:
+            if warning:
+                self.warn(warning, v)
+            else:
+                raise DistutilsError("Download error for %s: %s"
+                                     % (url, v))
+
+    def _download_url(self, scheme, url, tmpdir):
+        # Determine download filename
+        #
+        name = filter(None,urlparse.urlparse(url)[2].split('/'))
+        if name:
+            name = name[-1]
+            while '..' in name:
+                name = name.replace('..','.').replace('\\','_')
+        else:
+            name = "__downloaded__"    # default if URL has no path contents
+
+        if name.endswith('.egg.zip'):
+            name = name[:-4]    # strip the extra .zip before download
+
+        filename = os.path.join(tmpdir,name)
+
+        # Download the file
+        #
+        if scheme=='svn' or scheme.startswith('svn+'):
+            return self._download_svn(url, filename)
+        elif scheme=='git' or scheme.startswith('git+'):
+            return self._download_git(url, filename)
+        elif scheme.startswith('hg+'):
+            return self._download_hg(url, filename)
+        elif scheme=='file':
+            return urllib.url2pathname(urlparse.urlparse(url)[2])
+        else:
+            self.url_ok(url, True)   # raises error if not allowed
+            return self._attempt_download(url, filename)
+
+
+
+    def scan_url(self, url):
+        self.process_url(url, True)
+
+
+    def _attempt_download(self, url, filename):
+        headers = self._download_to(url, filename)
+        if 'html' in headers.get('content-type','').lower():
+            return self._download_html(url, headers, filename)
+        else:
+            return filename
+
+    def _download_html(self, url, headers, filename):
+        file = open(filename)
+        for line in file:
+            if line.strip():
+                # Check for a subversion index page
+                if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
+                    # it's a subversion index page:
+                    file.close()
+                    os.unlink(filename)
+                    return self._download_svn(url, filename)
+                break   # not an index page
+        file.close()
+        os.unlink(filename)
+        raise DistutilsError("Unexpected HTML page found at "+url)
+
+    def _download_svn(self, url, filename):
+        url = url.split('#',1)[0]   # remove any fragment for svn's sake
+        self.info("Doing subversion checkout from %s to %s", url, filename)
+        os.system("svn checkout -q %s %s" % (url, filename))
+        return filename
+
+    def _vcs_split_rev_from_url(self, url, pop_prefix=False):
+        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
+
+        scheme = scheme.split('+', 1)[-1]
+
+        # On older Pythons, urlsplit does not split off the fragment for
+        # unrecognized schemes (e.g. 'git+https'), so strip anything after
+        # '#' from the path here
+        path = path.split('#',1)[0]
+
+        rev = None
+        if '@' in path:
+            path, rev = path.rsplit('@', 1)
+
+        # Also, discard fragment
+        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
+
+        return url, rev
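+
+    # Illustrative example (added comment, not part of the original source):
+    # a pinned VCS URL splits into a clean clone URL plus a revision, e.g.
+    #
+    #   url, rev = self._vcs_split_rev_from_url(
+    #       'git+https://example.org/repo.git@v1.2#egg=pkg')
+    #   # url == 'https://example.org/repo.git', rev == 'v1.2'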
+
+    def _download_git(self, url, filename):
+        filename = filename.split('#',1)[0]
+        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
+
+        self.info("Doing git clone from %s to %s", url, filename)
+        os.system("git clone --quiet %s %s" % (url, filename))
+
+        if rev is not None:
+            self.info("Checking out %s", rev)
+            os.system("(cd %s && git checkout --quiet %s)" % (
+                filename,
+                rev,
+            ))
+
+        return filename
+
+    def _download_hg(self, url, filename):
+        filename = filename.split('#',1)[0]
+        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
+
+        self.info("Doing hg clone from %s to %s", url, filename)
+        os.system("hg clone --quiet %s %s" % (url, filename))
+
+        if rev is not None:
+            self.info("Updating to %s", rev)
+            os.system("(cd %s && hg up -C -r %s >&-)" % (
+                filename,
+                rev,
+            ))
+
+        return filename
+
+    def debug(self, msg, *args):
+        log.debug(msg, *args)
+
+    def info(self, msg, *args):
+        log.info(msg, *args)
+
+    def warn(self, msg, *args):
+        log.warn(msg, *args)
+
+# This pattern matches a character entity reference (a decimal numeric
+# reference, a hexadecimal numeric reference, or a named reference).
+entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
+
+def uchr(c):
+    if not isinstance(c, int):
+        return c
+    if c>255: return unichr(c)
+    return chr(c)
+
+def decode_entity(match):
+    what = match.group(1)
+    if what.startswith('#x'):
+        what = int(what[2:], 16)
+    elif what.startswith('#'):
+        what = int(what[1:])
+    else:
+        from htmlentitydefs import name2codepoint
+        what = name2codepoint.get(what, match.group(0))
+    return uchr(what)
+
+def htmldecode(text):
+    """Decode HTML entities in the given text."""
+    return entity_sub(decode_entity, text)
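+
+# Illustrative examples (added comment, not part of the original source) of
+# the entity handling above:
+#
+#   htmldecode('&lt;a href=&quot;x&quot;&gt;')   ->  '<a href="x">'
+#   htmldecode('&#65;&#x42;')                     ->  'AB'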
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def socket_timeout(timeout=15):
+    def _socket_timeout(func):
+        def _socket_timeout(*args, **kwargs):
+            old_timeout = socket.getdefaulttimeout()
+            socket.setdefaulttimeout(timeout)
+            try:
+                return func(*args, **kwargs)
+            finally:
+                socket.setdefaulttimeout(old_timeout)
+        return _socket_timeout
+    return _socket_timeout
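+
+# Illustrative usage of the decorator above (added comment, not part of the
+# original source); ``slow_fetch`` is a hypothetical function:
+#
+#   @socket_timeout(30)
+#   def slow_fetch(url):
+#       return urllib2.urlopen(url).read()
+#
+# The previous default socket timeout is restored after the wrapped call.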
+
+def _encode_auth(auth):
+    """
+    A function compatible with Python 2.3-3.3 that encodes the auth
+    component of a URL into a value suitable for an HTTP Basic auth header.
+    >>> _encode_auth('username%3Apassword')
+    u'dXNlcm5hbWU6cGFzc3dvcmQ='
+    """
+    auth_s = urllib2.unquote(auth)
+    # convert to bytes
+    auth_bytes = auth_s.encode()
+    # use the legacy interface for Python 2.3 support
+    encoded_bytes = base64.encodestring(auth_bytes)
+    # convert back to a string
+    encoded = encoded_bytes.decode()
+    # strip the trailing carriage return
+    return encoded.rstrip()
+
+def open_with_auth(url):
+    """Open a urllib2 request, handling HTTP authentication"""
+
+    scheme, netloc, path, params, query, frag = urlparse.urlparse(url)
+
+    # Double scheme does not raise on Mac OS X as revealed by a
+    # failing test. We would expect "nonnumeric port". Refs #20.
+    if netloc.endswith(':'):
+        raise httplib.InvalidURL("nonnumeric port: ''")
+
+    if scheme in ('http', 'https'):
+        auth, host = urllib2.splituser(netloc)
+    else:
+        auth = None
+
+    if auth:
+        auth = "Basic " + _encode_auth(auth)
+        new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
+        request = urllib2.Request(new_url)
+        request.add_header("Authorization", auth)
+    else:
+        request = urllib2.Request(url)
+
+    request.add_header('User-Agent', user_agent)
+    fp = urllib2.urlopen(request)
+
+    if auth:
+        # Put authentication info back into request URL if same host,
+        # so that links found on the page will work
+        s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
+        if s2==scheme and h2==host:
+            fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))
+
+    return fp
+
+# adding a timeout to avoid freezing package_index
+open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
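+
+# Illustrative usage (added comment, not part of the original source); the
+# URL and credentials below are hypothetical:
+#
+#   fp = open_with_auth('https://user:secret@pypi.example.org/simple/pkg/')
+#   page = fp.read()
+#
+# The user:secret pair is removed from the request URL and sent instead as a
+# Basic Authorization header.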
+
+
+
+
+
+
+
+
+
+
+
+def fix_sf_url(url):
+    return url      # backward compatibility
+
+def local_open(url):
+    """Read a local path, with special support for directories"""
+    scheme, server, path, param, query, frag = urlparse.urlparse(url)
+    filename = urllib.url2pathname(path)
+    if os.path.isfile(filename):
+        return urllib2.urlopen(url)
+    elif path.endswith('/') and os.path.isdir(filename):
+        files = []
+        for f in os.listdir(filename):
+            if f=='index.html':
+                fp = open(os.path.join(filename,f),'rb')
+                body = fp.read()
+                fp.close()
+                break
+            elif os.path.isdir(os.path.join(filename,f)):
+                f+='/'
+            files.append("<a href=%r>%s</a>" % (f,f))
+        else:
+            body = ("<html><head><title>%s</title>" % url) + \
+                "</head><body>%s</body></html>" % '\n'.join(files)
+        status, message = 200, "OK"
+    else:
+        status, message, body = 404, "Path not found", "Not found"
+
+    return urllib2.HTTPError(url, status, message,
+            {'content-type':'text/html'}, cStringIO.StringIO(body))
+
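+# Illustrative behaviour of local_open() (added comment, not part of the
+# original source); the path is hypothetical:
+#
+#   page = local_open('file:///tmp/wheelhouse/')
+#
+# For a directory without an index.html, ``page`` is a urllib2.HTTPError
+# carrying a small generated HTML listing of the directory entries
+# (status 200 "OK").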
+
+
+
+
+
+
+
+
+
+
+
+
+# this line is a kludge to keep the trailing blank lines for pje's editor
diff --git a/vendor/distribute-0.6.35/setuptools/sandbox.py b/vendor/distribute-0.6.35/setuptools/sandbox.py
new file mode 100644
index 0000000000000000000000000000000000000000..1583b81f268c18f7dc20c86d183d9924f98c5878
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/sandbox.py
@@ -0,0 +1,293 @@
+import os, sys, __builtin__, tempfile, operator, pkg_resources
+if os.name == "java":
+    import org.python.modules.posix.PosixModule as _os
+else:
+    _os = sys.modules[os.name]
+try:
+    _file = file
+except NameError:
+    _file = None
+_open = open
+from distutils.errors import DistutilsError
+__all__ = [
+    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+]
+def run_setup(setup_script, args):
+    """Run a distutils setup script, sandboxed in its directory"""
+    old_dir = os.getcwd()
+    save_argv = sys.argv[:]
+    save_path = sys.path[:]
+    setup_dir = os.path.abspath(os.path.dirname(setup_script))
+    temp_dir = os.path.join(setup_dir,'temp')
+    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
+    save_tmp = tempfile.tempdir
+    save_modules = sys.modules.copy()
+    pr_state = pkg_resources.__getstate__()
+    try:
+        tempfile.tempdir = temp_dir
+        os.chdir(setup_dir)
+        try:
+            sys.argv[:] = [setup_script]+list(args)
+            sys.path.insert(0, setup_dir)
+            DirectorySandbox(setup_dir).run(
+                lambda: execfile(
+                    "setup.py",
+                    {'__file__':setup_script, '__name__':'__main__'}
+                )
+            )
+        except SystemExit, v:
+            if v.args and v.args[0]:
+                raise
+            # Normal exit, just return
+    finally:
+        pkg_resources.__setstate__(pr_state)
+        sys.modules.update(save_modules)
+        # remove any modules imported within the sandbox
+        del_modules = [
+            mod_name for mod_name in sys.modules
+            if mod_name not in save_modules
+            # exclude any encodings modules. See #285
+            and not mod_name.startswith('encodings.')
+        ]
+        map(sys.modules.__delitem__, del_modules)
+        os.chdir(old_dir)
+        sys.path[:] = save_path
+        sys.argv[:] = save_argv
+        tempfile.tempdir = save_tmp
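+
+# Illustrative usage of run_setup() (added comment, not part of the original
+# source); the path and arguments are hypothetical:
+#
+#   run_setup('/tmp/build/Example-1.0/setup.py', ['--quiet', 'bdist_egg'])
+#
+# The script runs with its own directory as the working directory, and
+# filesystem writes are confined to that directory by DirectorySandbox.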
+
+class AbstractSandbox:
+    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
+
+    _active = False
+
+    def __init__(self):
+        self._attrs = [
+            name for name in dir(_os)
+                if not name.startswith('_') and hasattr(self,name)
+        ]
+
+    def _copy(self, source):
+        for name in self._attrs:
+            setattr(os, name, getattr(source,name))
+
+    def run(self, func):
+        """Run 'func' under os sandboxing"""
+        try:
+            self._copy(self)
+            if _file:
+                __builtin__.file = self._file
+            __builtin__.open = self._open
+            self._active = True
+            return func()
+        finally:
+            self._active = False
+            if _file:
+                __builtin__.file = _file
+            __builtin__.open = _open
+            self._copy(_os)
+
+
+    def _mk_dual_path_wrapper(name):
+        original = getattr(_os,name)
+        def wrap(self,src,dst,*args,**kw):
+            if self._active:
+                src,dst = self._remap_pair(name,src,dst,*args,**kw)
+            return original(src,dst,*args,**kw)
+        return wrap
+
+
+    for name in ["rename", "link", "symlink"]:
+        if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
+
+
+    def _mk_single_path_wrapper(name, original=None):
+        original = original or getattr(_os,name)
+        def wrap(self,path,*args,**kw):
+            if self._active:
+                path = self._remap_input(name,path,*args,**kw)
+            return original(path,*args,**kw)
+        return wrap
+
+    if _file:
+        _file = _mk_single_path_wrapper('file', _file)
+    _open = _mk_single_path_wrapper('open', _open)
+    for name in [
+        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
+        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
+        "startfile", "mkfifo", "mknod", "pathconf", "access"
+    ]:
+        if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
+
+
+    def _mk_single_with_return(name):
+        original = getattr(_os,name)
+        def wrap(self,path,*args,**kw):
+            if self._active:
+                path = self._remap_input(name,path,*args,**kw)
+                return self._remap_output(name, original(path,*args,**kw))
+            return original(path,*args,**kw)
+        return wrap
+
+    for name in ['readlink', 'tempnam']:
+        if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
+
+    def _mk_query(name):
+        original = getattr(_os,name)
+        def wrap(self,*args,**kw):
+            retval = original(*args,**kw)
+            if self._active:
+                return self._remap_output(name, retval)
+            return retval
+        return wrap
+
+    for name in ['getcwd', 'tmpnam']:
+        if hasattr(_os,name): locals()[name] = _mk_query(name)
+
+    def _validate_path(self,path):
+        """Called to remap or validate any path, whether input or output"""
+        return path
+
+    def _remap_input(self,operation,path,*args,**kw):
+        """Called for path inputs"""
+        return self._validate_path(path)
+
+    def _remap_output(self,operation,path):
+        """Called for path outputs"""
+        return self._validate_path(path)
+
+    def _remap_pair(self,operation,src,dst,*args,**kw):
+        """Called for path pairs like rename, link, and symlink operations"""
+        return (
+            self._remap_input(operation+'-from',src,*args,**kw),
+            self._remap_input(operation+'-to',dst,*args,**kw)
+        )
+
+
+if hasattr(os, 'devnull'):
+    _EXCEPTIONS = [os.devnull,]
+else:
+    _EXCEPTIONS = []
+
+try:
+    from win32com.client.gencache import GetGeneratePath
+    _EXCEPTIONS.append(GetGeneratePath())
+    del GetGeneratePath
+except ImportError:
+    # it appears pywin32 is not installed, so no need to exclude.
+    pass
+
+class DirectorySandbox(AbstractSandbox):
+    """Restrict operations to a single subdirectory - pseudo-chroot"""
+
+    write_ops = dict.fromkeys([
+        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
+        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
+    ])
+
+    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
+        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
+        self._prefix = os.path.join(self._sandbox,'')
+        self._exceptions = [os.path.normcase(os.path.realpath(path)) for path in exceptions]
+        AbstractSandbox.__init__(self)
+
+    def _violation(self, operation, *args, **kw):
+        raise SandboxViolation(operation, args, kw)
+
+    if _file:
+        def _file(self, path, mode='r', *args, **kw):
+            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+                self._violation("file", path, mode, *args, **kw)
+            return _file(path,mode,*args,**kw)
+
+    def _open(self, path, mode='r', *args, **kw):
+        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+            self._violation("open", path, mode, *args, **kw)
+        return _open(path,mode,*args,**kw)
+
+    def tmpnam(self):
+        self._violation("tmpnam")
+
+    def _ok(self,path):
+        active = self._active
+        try:
+            self._active = False
+            realpath = os.path.normcase(os.path.realpath(path))
+            if (self._exempted(realpath) or realpath == self._sandbox
+                or realpath.startswith(self._prefix)):
+                return True
+        finally:
+            self._active = active
+
+    def _exempted(self, filepath):
+        exception_matches = map(filepath.startswith, self._exceptions)
+        return True in exception_matches
+
+    def _remap_input(self,operation,path,*args,**kw):
+        """Called for path inputs"""
+        if operation in self.write_ops and not self._ok(path):
+            self._violation(operation, os.path.realpath(path), *args, **kw)
+        return path
+
+    def _remap_pair(self,operation,src,dst,*args,**kw):
+        """Called for path pairs like rename, link, and symlink operations"""
+        if not self._ok(src) or not self._ok(dst):
+            self._violation(operation, src, dst, *args, **kw)
+        return (src,dst)
+
+    def open(self, file, flags, mode=0777):
+        """Called for low-level os.open()"""
+        if flags & WRITE_FLAGS and not self._ok(file):
+            self._violation("os.open", file, flags, mode)
+        return _os.open(file,flags,mode)
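+
+    # Illustrative usage of the sandbox (added comment, not part of the
+    # original source); the directory and callable are hypothetical:
+    #
+    #   sandbox = DirectorySandbox('/tmp/build/Example-1.0')
+    #   sandbox.run(do_build)   # writes outside the directory raise
+    #                           # SandboxViolation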
+
+
+WRITE_FLAGS = reduce(
+    operator.or_,
+    [getattr(_os, a, 0) for a in
+        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
+)
+
+
+
+
+class SandboxViolation(DistutilsError):
+    """A setup script attempted to modify the filesystem outside the sandbox"""
+
+    def __str__(self):
+        return """SandboxViolation: %s%r %s
+
+The package setup script has attempted to modify files on your system
+that are not within the EasyInstall build area, and has been aborted.
+
+This package cannot be safely installed by EasyInstall, and may not
+support alternate installation locations even if you run its setup
+script by hand.  Please inform the package's author and the EasyInstall
+maintainers to find out if a fix or workaround is available.""" % self.args
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
diff --git a/vendor/distribute-0.6.35/setuptools/script template (dev).py b/vendor/distribute-0.6.35/setuptools/script template (dev).py
new file mode 100644
index 0000000000000000000000000000000000000000..6dd9dd45259880e964e42282697a5c1b2d071fb5
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/script template (dev).py	
@@ -0,0 +1,6 @@
+# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = """%(spec)r"""
+from pkg_resources import require; require("""%(spec)r""")
+del require
+__file__ = """%(dev_path)r"""
+execfile(__file__)
diff --git a/vendor/distribute-0.6.35/setuptools/script template.py b/vendor/distribute-0.6.35/setuptools/script template.py
new file mode 100644
index 0000000000000000000000000000000000000000..8dd5d5100177d17e6bd52e566ddeb5909c778c03
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/script template.py	
@@ -0,0 +1,4 @@
+# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = """%(spec)r"""
+import pkg_resources
+pkg_resources.run_script("""%(spec)r""", """%(script_name)r""")
diff --git a/vendor/distribute-0.6.35/setuptools/tests/__init__.py b/vendor/distribute-0.6.35/setuptools/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6988a08d780806209cc3e54a123614cc87907f7
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/__init__.py
@@ -0,0 +1,349 @@
+"""Tests for the 'setuptools' package"""
+import sys
+import os
+import unittest
+import doctest
+import distutils.core
+import distutils.cmd
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+from distutils.core import Extension
+from distutils.version import LooseVersion
+
+import setuptools.dist
+import setuptools.depends as dep
+from setuptools import Feature
+from setuptools.depends import Require
+
+def additional_tests():
+    import doctest, unittest
+    suite = unittest.TestSuite((
+        doctest.DocFileSuite(
+            os.path.join('tests', 'api_tests.txt'),
+            optionflags=doctest.ELLIPSIS, package='pkg_resources',
+            ),
+        ))
+    if sys.platform == 'win32':
+        suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
+    return suite
+
+def makeSetup(**args):
+    """Return distribution from 'setup(**args)', without executing commands"""
+
+    distutils.core._setup_stop_after = "commandline"
+
+    # Don't let system command line leak into tests!
+    args.setdefault('script_args',['install'])
+
+    try:
+        return setuptools.setup(**args)
+    finally:
+        distutils.core._setup_stop_after = None
+
+
+class DependsTests(unittest.TestCase):
+
+    def testExtractConst(self):
+        if not hasattr(dep, 'extract_constant'):
+            # skip on non-bytecode platforms
+            return
+
+        def f1():
+            global x, y, z
+            x = "test"
+            y = z
+
+        # unrecognized name
+        self.assertEqual(dep.extract_constant(f1.func_code,'q', -1), None)
+
+        # constant assigned
+        self.assertEqual(dep.extract_constant(f1.func_code,'x', -1), "test")
+
+        # expression assigned
+        self.assertEqual(dep.extract_constant(f1.func_code,'y', -1), -1)
+
+        # recognized name, not assigned
+        self.assertEqual(dep.extract_constant(f1.func_code,'z', -1), None)
+
+    def testFindModule(self):
+        self.assertRaises(ImportError, dep.find_module, 'no-such.-thing')
+        self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent')
+        f,p,i = dep.find_module('setuptools.tests')
+        f.close()
+
+    def testModuleExtract(self):
+        if not hasattr(dep, 'get_module_constant'):
+            # skip on non-bytecode platforms
+            return
+
+        from email import __version__
+        self.assertEqual(
+            dep.get_module_constant('email','__version__'), __version__
+        )
+        self.assertEqual(
+            dep.get_module_constant('sys','version'), sys.version
+        )
+        self.assertEqual(
+            dep.get_module_constant('setuptools.tests','__doc__'),__doc__
+        )
+
+    def testRequire(self):
+        if not hasattr(dep, 'extract_constant'):
+            # skip on non-bytecode platforms
+            return
+
+        req = Require('Email','1.0.3','email')
+
+        self.assertEqual(req.name, 'Email')
+        self.assertEqual(req.module, 'email')
+        self.assertEqual(req.requested_version, '1.0.3')
+        self.assertEqual(req.attribute, '__version__')
+        self.assertEqual(req.full_name(), 'Email-1.0.3')
+
+        from email import __version__
+        self.assertEqual(req.get_version(), __version__)
+        self.assertTrue(req.version_ok('1.0.9'))
+        self.assertTrue(not req.version_ok('0.9.1'))
+        self.assertTrue(not req.version_ok('unknown'))
+
+        self.assertTrue(req.is_present())
+        self.assertTrue(req.is_current())
+
+        req = Require('Email 3000','03000','email',format=LooseVersion)
+        self.assertTrue(req.is_present())
+        self.assertTrue(not req.is_current())
+        self.assertTrue(not req.version_ok('unknown'))
+
+        req = Require('Do-what-I-mean','1.0','d-w-i-m')
+        self.assertTrue(not req.is_present())
+        self.assertTrue(not req.is_current())
+
+        req = Require('Tests', None, 'tests', homepage="http://example.com")
+        self.assertEqual(req.format, None)
+        self.assertEqual(req.attribute, None)
+        self.assertEqual(req.requested_version, None)
+        self.assertEqual(req.full_name(), 'Tests')
+        self.assertEqual(req.homepage, 'http://example.com')
+
+        paths = [os.path.dirname(p) for p in __path__]
+        self.assertTrue(req.is_present(paths))
+        self.assertTrue(req.is_current(paths))
+
+
+class DistroTests(unittest.TestCase):
+
+    def setUp(self):
+        self.e1 = Extension('bar.ext',['bar.c'])
+        self.e2 = Extension('c.y', ['y.c'])
+
+        self.dist = makeSetup(
+            packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
+            py_modules=['b.d','x'],
+            ext_modules = (self.e1, self.e2),
+            package_dir = {},
+        )
+
+    def testDistroType(self):
+        self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution))
+
+    def testExcludePackage(self):
+        self.dist.exclude_package('a')
+        self.assertEqual(self.dist.packages, ['b','c'])
+
+        self.dist.exclude_package('b')
+        self.assertEqual(self.dist.packages, ['c'])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
+
+        self.dist.exclude_package('c')
+        self.assertEqual(self.dist.packages, [])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, [self.e1])
+
+        # test removals from unspecified options
+        makeSetup().exclude_package('x')
+
+    def testIncludeExclude(self):
+        # remove an extension
+        self.dist.exclude(ext_modules=[self.e1])
+        self.assertEqual(self.dist.ext_modules, [self.e2])
+
+        # add it back in
+        self.dist.include(ext_modules=[self.e1])
+        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+
+        # should not add duplicate
+        self.dist.include(ext_modules=[self.e1])
+        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+
+    def testExcludePackages(self):
+        self.dist.exclude(packages=['c','b','a'])
+        self.assertEqual(self.dist.packages, [])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, [self.e1])
+
+    def testEmpty(self):
+        dist = makeSetup()
+        dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+        dist = makeSetup()
+        dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+
+    def testContents(self):
+        self.assertTrue(self.dist.has_contents_for('a'))
+        self.dist.exclude_package('a')
+        self.assertTrue(not self.dist.has_contents_for('a'))
+
+        self.assertTrue(self.dist.has_contents_for('b'))
+        self.dist.exclude_package('b')
+        self.assertTrue(not self.dist.has_contents_for('b'))
+
+        self.assertTrue(self.dist.has_contents_for('c'))
+        self.dist.exclude_package('c')
+        self.assertTrue(not self.dist.has_contents_for('c'))
+
+    def testInvalidIncludeExclude(self):
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, nonexistent_option='x'
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, nonexistent_option='x'
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, packages={'x':'y'}
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, packages={'x':'y'}
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, ext_modules={'x':'y'}
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, ext_modules={'x':'y'}
+        )
+
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, package_dir=['q']
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, package_dir=['q']
+        )
+
+
+class FeatureTests(unittest.TestCase):
+
+    def setUp(self):
+        self.req = Require('Distutils','1.0.3','distutils')
+        self.dist = makeSetup(
+            features={
+                'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
+                'bar': Feature("bar",  standard=True, packages=['pkg.bar'],
+                               py_modules=['bar_et'], remove=['bar.ext'],
+                       ),
+                'baz': Feature(
+                        "baz", optional=False, packages=['pkg.baz'],
+                        scripts = ['scripts/baz_it'],
+                        libraries=[('libfoo','foo/foofoo.c')]
+                       ),
+                'dwim': Feature("DWIM", available=False, remove='bazish'),
+            },
+            script_args=['--without-bar', 'install'],
+            packages = ['pkg.bar', 'pkg.foo'],
+            py_modules = ['bar_et', 'bazish'],
+            ext_modules = [Extension('bar.ext',['bar.c'])]
+        )
+
+    def testDefaults(self):
+        self.assertTrue(not
+            Feature(
+                "test",standard=True,remove='x',available=False
+            ).include_by_default()
+        )
+        self.assertTrue(
+            Feature("test",standard=True,remove='x').include_by_default()
+        )
+        # Feature must have either kwargs, removes, or require_features
+        self.assertRaises(DistutilsSetupError, Feature, "test")
+
+    def testAvailability(self):
+        self.assertRaises(
+            DistutilsPlatformError,
+            self.dist.features['dwim'].include_in, self.dist
+        )
+
+    def testFeatureOptions(self):
+        dist = self.dist
+        self.assertTrue(
+            ('with-dwim',None,'include DWIM') in dist.feature_options
+        )
+        self.assertTrue(
+            ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
+        )
+        self.assertTrue(
+            ('with-bar',None,'include bar (default)') in dist.feature_options
+        )
+        self.assertTrue(
+            ('without-bar',None,'exclude bar') in dist.feature_options
+        )
+        self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
+        self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
+        self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
+        self.assertTrue(not 'without-baz' in dist.feature_negopt)
+
+    def testUseFeatures(self):
+        dist = self.dist
+        self.assertEqual(dist.with_foo,1)
+        self.assertEqual(dist.with_bar,0)
+        self.assertEqual(dist.with_baz,1)
+        self.assertTrue(not 'bar_et' in dist.py_modules)
+        self.assertTrue(not 'pkg.bar' in dist.packages)
+        self.assertTrue('pkg.baz' in dist.packages)
+        self.assertTrue('scripts/baz_it' in dist.scripts)
+        self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries)
+        self.assertEqual(dist.ext_modules,[])
+        self.assertEqual(dist.require_features, [self.req])
+
+        # If we ask for bar, it should fail because we explicitly disabled
+        # it on the command line
+        self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
+
+    def testFeatureWithInvalidRemove(self):
+        self.assertRaises(
+            SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
+        )
+
+class TestCommandTests(unittest.TestCase):
+
+    def testTestIsCommand(self):
+        test_cmd = makeSetup().get_command_obj('test')
+        self.assertTrue(isinstance(test_cmd, distutils.cmd.Command))
+
+    def testLongOptSuiteWNoDefault(self):
+        ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
+        ts1 = ts1.get_command_obj('test')
+        ts1.ensure_finalized()
+        self.assertEqual(ts1.test_suite, 'foo.tests.suite')
+
+    def testDefaultSuite(self):
+        ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
+        ts2.ensure_finalized()
+        self.assertEqual(ts2.test_suite, 'bar.tests.suite')
+
+    def testDefaultWModuleOnCmdLine(self):
+        ts3 = makeSetup(
+            test_suite='bar.tests',
+            script_args=['test','-m','foo.tests']
+        ).get_command_obj('test')
+        ts3.ensure_finalized()
+        self.assertEqual(ts3.test_module, 'foo.tests')
+        self.assertEqual(ts3.test_suite,  'foo.tests.test_suite')
+
+    def testConflictingOptions(self):
+        ts4 = makeSetup(
+            script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
+        ).get_command_obj('test')
+        self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
+
+    def testNoSuite(self):
+        ts5 = makeSetup().get_command_obj('test')
+        ts5.ensure_finalized()
+        self.assertEqual(ts5.test_suite, None)
diff --git a/vendor/distribute-0.6.35/setuptools/tests/doctest.py b/vendor/distribute-0.6.35/setuptools/tests/doctest.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc1e06c398b1e861f04bdef5eba9eb6b92b13aa7
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/doctest.py
@@ -0,0 +1,2683 @@
+# Module doctest.
+# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
+# Major enhancements and refactoring by:
+#     Jim Fulton
+#     Edward Loper
+
+# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
+
+try:
+    basestring
+except NameError:
+    basestring = str,unicode
+
+try:
+    enumerate
+except NameError:
+    def enumerate(seq):
+        return zip(range(len(seq)),seq)
+
+r"""Module doctest -- a framework for running examples in docstrings.
+
+In simplest use, end each module M to be tested with:
+
+def _test():
+    import doctest
+    doctest.testmod()
+
+if __name__ == "__main__":
+    _test()
+
+Then running the module as a script will cause the examples in the
+docstrings to get executed and verified:
+
+python M.py
+
+This won't display anything unless an example fails, in which case the
+failing example(s) and the cause(s) of the failure(s) are printed to stdout
+(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
+line of output is "Test failed.".
+
+Run it with the -v switch instead:
+
+python M.py -v
+
+and a detailed report of all examples tried is printed to stdout, along
+with assorted summaries at the end.
+
+You can force verbose mode by passing "verbose=True" to testmod, or prohibit
+it by passing "verbose=False".  In either of those cases, sys.argv is not
+examined by testmod.
+
+There are a variety of other ways to run doctests, including integration
+with the unittest framework, and support for running non-Python text
+files containing doctests.  There are also many ways to override parts
+of doctest's default behaviors.  See the Library Reference Manual for
+details.
+"""
+
+__docformat__ = 'reStructuredText en'
+
+__all__ = [
+    # 0, Option Flags
+    'register_optionflag',
+    'DONT_ACCEPT_TRUE_FOR_1',
+    'DONT_ACCEPT_BLANKLINE',
+    'NORMALIZE_WHITESPACE',
+    'ELLIPSIS',
+    'IGNORE_EXCEPTION_DETAIL',
+    'COMPARISON_FLAGS',
+    'REPORT_UDIFF',
+    'REPORT_CDIFF',
+    'REPORT_NDIFF',
+    'REPORT_ONLY_FIRST_FAILURE',
+    'REPORTING_FLAGS',
+    # 1. Utility Functions
+    'is_private',
+    # 2. Example & DocTest
+    'Example',
+    'DocTest',
+    # 3. Doctest Parser
+    'DocTestParser',
+    # 4. Doctest Finder
+    'DocTestFinder',
+    # 5. Doctest Runner
+    'DocTestRunner',
+    'OutputChecker',
+    'DocTestFailure',
+    'UnexpectedException',
+    'DebugRunner',
+    # 6. Test Functions
+    'testmod',
+    'testfile',
+    'run_docstring_examples',
+    # 7. Tester
+    'Tester',
+    # 8. Unittest Support
+    'DocTestSuite',
+    'DocFileSuite',
+    'set_unittest_reportflags',
+    # 9. Debugging Support
+    'script_from_examples',
+    'testsource',
+    'debug_src',
+    'debug',
+]
+
+import __future__
+
+import sys, traceback, inspect, linecache, os, re, types
+import unittest, difflib, pdb, tempfile
+import warnings
+from StringIO import StringIO
+
+# Don't whine about the deprecated is_private function in this
+# module's tests.
+warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
+                        __name__, 0)
+
+# There are 4 basic classes:
+#  - Example: a <source, want> pair, plus an intra-docstring line number.
+#  - DocTest: a collection of examples, parsed from a docstring, plus
+#    info about where the docstring came from (name, filename, lineno).
+#  - DocTestFinder: extracts DocTests from a given object's docstring and
+#    its contained objects' docstrings.
+#  - DocTestRunner: runs DocTest cases, and accumulates statistics.
+#
+# So the basic picture is:
+#
+#                             list of:
+# +------+                   +---------+                   +-------+
+# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
+# +------+                   +---------+                   +-------+
+#                            | Example |
+#                            |   ...   |
+#                            | Example |
+#                            +---------+
+
+# Option constants.
+
+OPTIONFLAGS_BY_NAME = {}
+def register_optionflag(name):
+    flag = 1 << len(OPTIONFLAGS_BY_NAME)
+    OPTIONFLAGS_BY_NAME[name] = flag
+    return flag
+
+DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
+DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
+NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
+ELLIPSIS = register_optionflag('ELLIPSIS')
+IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
+
+COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
+                    DONT_ACCEPT_BLANKLINE |
+                    NORMALIZE_WHITESPACE |
+                    ELLIPSIS |
+                    IGNORE_EXCEPTION_DETAIL)
+
+REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
+REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
+REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
+REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
+
+REPORTING_FLAGS = (REPORT_UDIFF |
+                   REPORT_CDIFF |
+                   REPORT_NDIFF |
+                   REPORT_ONLY_FIRST_FAILURE)
+
+# Special string markers for use in `want` strings:
+BLANKLINE_MARKER = '<BLANKLINE>'
+ELLIPSIS_MARKER = '...'
+
+######################################################################
+## Table of Contents
+######################################################################
+#  1. Utility Functions
+#  2. Example & DocTest -- store test cases
+#  3. DocTest Parser -- extracts examples from strings
+#  4. DocTest Finder -- extracts test cases from objects
+#  5. DocTest Runner -- runs test cases
+#  6. Test Functions -- convenient wrappers for testing
+#  7. Tester Class -- for backwards compatibility
+#  8. Unittest Support
+#  9. Debugging Support
+# 10. Example Usage
+
+######################################################################
+## 1. Utility Functions
+######################################################################
+
+def is_private(prefix, base):
+    """prefix, base -> true iff name prefix + "." + base is "private".
+
+    Prefix may be an empty string, and base does not contain a period.
+    Prefix is ignored (although functions you write conforming to this
+    protocol may make use of it).
+    Return true iff base begins with an (at least one) underscore, but
+    does not both begin and end with (at least) two underscores.
+
+    >>> is_private("a.b", "my_func")
+    False
+    >>> is_private("____", "_my_func")
+    True
+    >>> is_private("someclass", "__init__")
+    False
+    >>> is_private("sometypo", "__init_")
+    True
+    >>> is_private("x.y.z", "_")
+    True
+    >>> is_private("_x.y.z", "__")
+    False
+    >>> is_private("", "")  # senseless but consistent
+    False
+    """
+    warnings.warn("is_private is deprecated; it wasn't useful; "
+                  "examine DocTestFinder.find() lists instead",
+                  DeprecationWarning, stacklevel=2)
+    return base[:1] == "_" and not base[:2] == "__" == base[-2:]
+
+def _extract_future_flags(globs):
+    """
+    Return the compiler-flags associated with the future features that
+    have been imported into the given namespace (globs).
+    """
+    flags = 0
+    for fname in __future__.all_feature_names:
+        feature = globs.get(fname, None)
+        if feature is getattr(__future__, fname):
+            flags |= feature.compiler_flag
+    return flags
+
+def _normalize_module(module, depth=2):
+    """
+    Return the module specified by `module`.  In particular:
+      - If `module` is a module, then return module.
+      - If `module` is a string, then import and return the
+        module with that name.
+      - If `module` is None, then return the calling module.
+        The calling module is assumed to be the module of
+        the stack frame at the given depth in the call stack.
+    """
+    if inspect.ismodule(module):
+        return module
+    elif isinstance(module, (str, unicode)):
+        return __import__(module, globals(), locals(), ["*"])
+    elif module is None:
+        return sys.modules[sys._getframe(depth).f_globals['__name__']]
+    else:
+        raise TypeError("Expected a module, string, or None")
+
+def _indent(s, indent=4):
+    """
+    Add the given number of space characters to the beginning of every
+    non-blank line in `s`, and return the result.
+    """
+    # This regexp matches the start of non-blank lines:
+    return re.sub('(?m)^(?!$)', indent*' ', s)
+
+def _exception_traceback(exc_info):
+    """
+    Return a string containing a traceback message for the given
+    exc_info tuple (as returned by sys.exc_info()).
+    """
+    # Get a traceback message.
+    excout = StringIO()
+    exc_type, exc_val, exc_tb = exc_info
+    traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
+    return excout.getvalue()
+
+# Override some StringIO methods.
+class _SpoofOut(StringIO):
+    def getvalue(self):
+        result = StringIO.getvalue(self)
+        # If anything at all was written, make sure there's a trailing
+        # newline.  There's no way for the expected output to indicate
+        # that a trailing newline is missing.
+        if result and not result.endswith("\n"):
+            result += "\n"
+        # Prevent softspace from screwing up the next test case, in
+        # case they used print with a trailing comma in an example.
+        if hasattr(self, "softspace"):
+            del self.softspace
+        return result
+
+    def truncate(self,   size=None):
+        StringIO.truncate(self, size)
+        if hasattr(self, "softspace"):
+            del self.softspace
+
+# Worst-case linear-time ellipsis matching.
+def _ellipsis_match(want, got):
+    """
+    Essentially the only subtle case:
+    >>> _ellipsis_match('aa...aa', 'aaa')
+    False
+    """
+    if want.find(ELLIPSIS_MARKER)==-1:
+        return want == got
+
+    # Find "the real" strings.
+    ws = want.split(ELLIPSIS_MARKER)
+    assert len(ws) >= 2
+
+    # Deal with exact matches possibly needed at one or both ends.
+    startpos, endpos = 0, len(got)
+    w = ws[0]
+    if w:   # starts with exact match
+        if got.startswith(w):
+            startpos = len(w)
+            del ws[0]
+        else:
+            return False
+    w = ws[-1]
+    if w:   # ends with exact match
+        if got.endswith(w):
+            endpos -= len(w)
+            del ws[-1]
+        else:
+            return False
+
+    if startpos > endpos:
+        # Exact end matches required more characters than we have, as in
+        # _ellipsis_match('aa...aa', 'aaa')
+        return False
+
+    # For the rest, we only need to find the leftmost non-overlapping
+    # match for each piece.  If there's no overall match that way alone,
+    # there's no overall match period.
+    for w in ws:
+        # w may be '' at times, if there are consecutive ellipses, or
+        # due to an ellipsis at the start or end of `want`.  That's OK.
+        # Search for an empty string succeeds, and doesn't change startpos.
+        startpos = got.find(w, startpos, endpos)
+        if startpos < 0:
+            return False
+        startpos += len(w)
+
+    return True
+
+def _comment_line(line):
+    "Return a commented form of the given line"
+    line = line.rstrip()
+    if line:
+        return '# '+line
+    else:
+        return '#'
+
+class _OutputRedirectingPdb(pdb.Pdb):
+    """
+    A specialized version of the python debugger that redirects stdout
+    to a given stream when interacting with the user.  Stdout is *not*
+    redirected when traced code is executed.
+    """
+    def __init__(self, out):
+        self.__out = out
+        pdb.Pdb.__init__(self)
+
+    def trace_dispatch(self, *args):
+        # Redirect stdout to the given stream.
+        save_stdout = sys.stdout
+        sys.stdout = self.__out
+        # Call Pdb's trace dispatch method.
+        try:
+            return pdb.Pdb.trace_dispatch(self, *args)
+        finally:
+            sys.stdout = save_stdout
+
+# [XX] Normalize with respect to os.path.pardir?
+def _module_relative_path(module, path):
+    if not inspect.ismodule(module):
+        raise TypeError, 'Expected a module: %r' % module
+    if path.startswith('/'):
+        raise ValueError, 'Module-relative files may not have absolute paths'
+
+    # Find the base directory for the path.
+    if hasattr(module, '__file__'):
+        # A normal module/package
+        basedir = os.path.split(module.__file__)[0]
+    elif module.__name__ == '__main__':
+        # An interactive session.
+        if len(sys.argv)>0 and sys.argv[0] != '':
+            basedir = os.path.split(sys.argv[0])[0]
+        else:
+            basedir = os.curdir
+    else:
+        # A module w/o __file__ (this includes builtins)
+        raise ValueError("Can't resolve paths relative to the module %r "
+                         "(it has no __file__)" % (module,))
+
+    # Combine the base directory and the path.
+    return os.path.join(basedir, *(path.split('/')))
+
+######################################################################
+## 2. Example & DocTest
+######################################################################
+## - An "example" is a <source, want> pair, where "source" is a
+##   fragment of source code, and "want" is the expected output for
+##   "source."  The Example class also includes information about
+##   where the example was extracted from.
+##
+## - A "doctest" is a collection of examples, typically extracted from
+##   a string (such as an object's docstring).  The DocTest class also
+##   includes information about where the string was extracted from.
+
+class Example:
+    """
+    A single doctest example, consisting of source code and expected
+    output.  `Example` defines the following attributes:
+
+      - source: A single Python statement, always ending with a newline.
+        The constructor adds a newline if needed.
+
+      - want: The expected output from running the source code (either
+        from stdout, or a traceback in case of exception).  `want` ends
+        with a newline unless it's empty, in which case it's an empty
+        string.  The constructor adds a newline if needed.
+
+      - exc_msg: The exception message generated by the example, if
+        the example is expected to generate an exception; or `None` if
+        it is not expected to generate an exception.  This exception
+        message is compared against the return value of
+        `traceback.format_exception_only()`.  `exc_msg` ends with a
+        newline unless it's `None`.  The constructor adds a newline
+        if needed.
+
+      - lineno: The line number within the DocTest string containing
+        this Example where the Example begins.  This line number is
+        zero-based, with respect to the beginning of the DocTest.
+
+      - indent: The example's indentation in the DocTest string.
+        I.e., the number of space characters that precede the
+        example's first prompt.
+
+      - options: A dictionary mapping from option flags to True or
+        False, which is used to override default options for this
+        example.  Any option flags not contained in this dictionary
+        are left at their default value (as specified by the
+        DocTestRunner's optionflags).  By default, no options are set.
+    """
+    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
+                 options=None):
+        # Normalize inputs.
+        if not source.endswith('\n'):
+            source += '\n'
+        if want and not want.endswith('\n'):
+            want += '\n'
+        if exc_msg is not None and not exc_msg.endswith('\n'):
+            exc_msg += '\n'
+        # Store properties.
+        self.source = source
+        self.want = want
+        self.lineno = lineno
+        self.indent = indent
+        if options is None: options = {}
+        self.options = options
+        self.exc_msg = exc_msg
+
+class DocTest:
+    """
+    A collection of doctest examples that should be run in a single
+    namespace.  Each `DocTest` defines the following attributes:
+
+      - examples: the list of examples.
+
+      - globs: The namespace (aka globals) that the examples should
+        be run in.
+
+      - name: A name identifying the DocTest (typically, the name of
+        the object whose docstring this DocTest was extracted from).
+
+      - filename: The name of the file that this DocTest was extracted
+        from, or `None` if the filename is unknown.
+
+      - lineno: The line number within filename where this DocTest
+        begins, or `None` if the line number is unavailable.  This
+        line number is zero-based, with respect to the beginning of
+        the file.
+
+      - docstring: The string that the examples were extracted from,
+        or `None` if the string is unavailable.
+    """
+    def __init__(self, examples, globs, name, filename, lineno, docstring):
+        """
+        Create a new DocTest containing the given examples.  The
+        DocTest's globals are initialized with a copy of `globs`.
+        """
+        assert not isinstance(examples, basestring), \
+               "DocTest no longer accepts str; use DocTestParser instead"
+        self.examples = examples
+        self.docstring = docstring
+        self.globs = globs.copy()
+        self.name = name
+        self.filename = filename
+        self.lineno = lineno
+
+    def __repr__(self):
+        if len(self.examples) == 0:
+            examples = 'no examples'
+        elif len(self.examples) == 1:
+            examples = '1 example'
+        else:
+            examples = '%d examples' % len(self.examples)
+        return ('<DocTest %s from %s:%s (%s)>' %
+                (self.name, self.filename, self.lineno, examples))
+
+
+    # This lets us sort tests by name:
+    def __cmp__(self, other):
+        if not isinstance(other, DocTest):
+            return -1
+        return cmp((self.name, self.filename, self.lineno, id(self)),
+                   (other.name, other.filename, other.lineno, id(other)))
+
+######################################################################
+## 3. DocTestParser
+######################################################################
+
+class DocTestParser:
+    """
+    A class used to parse strings containing doctest examples.
+    """
+    # This regular expression is used to find doctest examples in a
+    # string.  It defines three groups: `source` is the source code
+    # (including leading indentation and prompts); `indent` is the
+    # indentation of the first (PS1) line of the source code; and
+    # `want` is the expected output (including leading indentation).
+    _EXAMPLE_RE = re.compile(r'''
+        # Source consists of a PS1 line followed by zero or more PS2 lines.
+        (?P<source>
+            (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
+            (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
+        \n?
+        # Want consists of any non-blank lines that do not start with PS1.
+        (?P<want> (?:(?![ ]*$)    # Not a blank line
+                     (?![ ]*>>>)  # Not a line starting with PS1
+                     .*$\n?       # But any other line
+                  )*)
+        ''', re.MULTILINE | re.VERBOSE)
+
+    # A regular expression for handling `want` strings that contain
+    # expected exceptions.  It divides `want` into three pieces:
+    #    - the traceback header line (`hdr`)
+    #    - the traceback stack (`stack`)
+    #    - the exception message (`msg`), as generated by
+    #      traceback.format_exception_only()
+    # `msg` may have multiple lines.  We assume/require that the
+    # exception message is the first non-indented line starting with a word
+    # character following the traceback header line.
+    _EXCEPTION_RE = re.compile(r"""
+        # Grab the traceback header.  Different versions of Python have
+        # said different things on the first traceback line.
+        ^(?P<hdr> Traceback\ \(
+            (?: most\ recent\ call\ last
+            |   innermost\ last
+            ) \) :
+        )
+        \s* $                # toss trailing whitespace on the header.
+        (?P<stack> .*?)      # don't blink: absorb stuff until...
+        ^ (?P<msg> \w+ .*)   #     a line *starts* with alphanum.
+        """, re.VERBOSE | re.MULTILINE | re.DOTALL)
+
+    # A callable returning a true value iff its argument is a blank line
+    # or contains a single comment.
+    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
+
+    def parse(self, string, name='<string>'):
+        """
+        Divide the given string into examples and intervening text,
+        and return them as a list of alternating Examples and strings.
+        Line numbers for the Examples are 0-based.  The optional
+        argument `name` is a name identifying this string, and is only
+        used for error messages.
+        """
+        string = string.expandtabs()
+        # If all lines begin with the same indentation, then strip it.
+        min_indent = self._min_indent(string)
+        if min_indent > 0:
+            string = '\n'.join([l[min_indent:] for l in string.split('\n')])
+
+        output = []
+        charno, lineno = 0, 0
+        # Find all doctest examples in the string:
+        for m in self._EXAMPLE_RE.finditer(string):
+            # Add the pre-example text to `output`.
+            output.append(string[charno:m.start()])
+            # Update lineno (lines before this example)
+            lineno += string.count('\n', charno, m.start())
+            # Extract info from the regexp match.
+            (source, options, want, exc_msg) = \
+                     self._parse_example(m, name, lineno)
+            # Create an Example, and add it to the list.
+            if not self._IS_BLANK_OR_COMMENT(source):
+                output.append( Example(source, want, exc_msg,
+                                    lineno=lineno,
+                                    indent=min_indent+len(m.group('indent')),
+                                    options=options) )
+            # Update lineno (lines inside this example)
+            lineno += string.count('\n', m.start(), m.end())
+            # Update charno.
+            charno = m.end()
+        # Add any remaining post-example text to `output`.
+        output.append(string[charno:])
+        return output
+
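+    # Rough usage sketch (values shown are only illustrative): for a string
+    # containing prose, a blank line, an example such as ">>> 1 + 1" with
+    # expected output "2", a blank line, and more prose, parse() returns an
+    # alternating list along the lines of
+    #     ['Some prose.\n\n', Example(...), '\nMore prose.\n']
+    # where the Example holds source '1 + 1\n' and want '2\n'.
+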
+    def get_doctest(self, string, globs, name, filename, lineno):
+        """
+        Extract all doctest examples from the given string, and
+        collect them into a `DocTest` object.
+
+        `globs`, `name`, `filename`, and `lineno` are attributes for
+        the new `DocTest` object.  See the documentation for `DocTest`
+        for more information.
+        """
+        return DocTest(self.get_examples(string, name), globs,
+                       name, filename, lineno, string)
+
+    def get_examples(self, string, name='<string>'):
+        """
+        Extract all doctest examples from the given string, and return
+        them as a list of `Example` objects.  Line numbers are
+        0-based, because it's most common in doctests that nothing
+        interesting appears on the same line as the opening triple-quote,
+        and so the first interesting line is called \"line 1\" then.
+
+        The optional argument `name` is a name identifying this
+        string, and is only used for error messages.
+        """
+        return [x for x in self.parse(string, name)
+                if isinstance(x, Example)]
+
+    def _parse_example(self, m, name, lineno):
+        """
+        Given a regular expression match from `_EXAMPLE_RE` (`m`),
+        return a tuple `(source, options, want, exc_msg)`, where `source`
+        is the matched example's source code (with prompts and indentation
+        stripped); `options` is a dictionary of option overrides extracted
+        from the source; `want` is the example's expected output (with
+        indentation stripped); and `exc_msg` is the expected exception
+        message extracted from `want`, or None if no exception is expected.
+
+        `name` is the string's name, and `lineno` is the line number
+        where the example starts; both are used for error messages.
+        """
+        # Get the example's indentation level.
+        indent = len(m.group('indent'))
+
+        # Divide source into lines; check that they're properly
+        # indented; and then strip their indentation & prompts.
+        source_lines = m.group('source').split('\n')
+        self._check_prompt_blank(source_lines, indent, name, lineno)
+        self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
+        source = '\n'.join([sl[indent+4:] for sl in source_lines])
+
+        # Divide want into lines; check that it's properly indented; and
+        # then strip the indentation.  Spaces before the last newline should
+        # be preserved, so plain rstrip() isn't good enough.
+        want = m.group('want')
+        want_lines = want.split('\n')
+        if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
+            del want_lines[-1]  # forget final newline & spaces after it
+        self._check_prefix(want_lines, ' '*indent, name,
+                           lineno + len(source_lines))
+        want = '\n'.join([wl[indent:] for wl in want_lines])
+
+        # If `want` contains a traceback message, then extract it.
+        m = self._EXCEPTION_RE.match(want)
+        if m:
+            exc_msg = m.group('msg')
+        else:
+            exc_msg = None
+
+        # Extract options from the source.
+        options = self._find_options(source, name, lineno)
+
+        return source, options, want, exc_msg
+
+    # This regular expression looks for option directives in the
+    # source code of an example.  Option directives are comments
+    # starting with "doctest:".  Warning: this may give false
+    # positives for string-literals that contain the string
+    # "#doctest:".  Eliminating these false positives would require
+    # actually parsing the string; but we limit them by ignoring any
+    # line containing "#doctest:" that is *followed* by a quote mark.
+    _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
+                                      re.MULTILINE)
+
+    def _find_options(self, source, name, lineno):
+        """
+        Return a dictionary containing option overrides extracted from
+        option directives in the given source string.
+
+        `name` is the string's name, and `lineno` is the line number
+        where the example starts; both are used for error messages.
+        """
+        options = {}
+        # (note: with the current regexp, this will match at most once:)
+        for m in self._OPTION_DIRECTIVE_RE.finditer(source):
+            option_strings = m.group(1).replace(',', ' ').split()
+            for option in option_strings:
+                if (option[0] not in '+-' or
+                    option[1:] not in OPTIONFLAGS_BY_NAME):
+                    raise ValueError('line %r of the doctest for %s '
+                                     'has an invalid option: %r' %
+                                     (lineno+1, name, option))
+                flag = OPTIONFLAGS_BY_NAME[option[1:]]
+                options[flag] = (option[0] == '+')
+        if options and self._IS_BLANK_OR_COMMENT(source):
+            raise ValueError('line %r of the doctest for %s has an option '
+                             'directive on a line with no example: %r' %
+                             (lineno, name, source))
+        return options
+
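+    # For illustration: a source line such as
+    #     print x    # doctest: +ELLIPSIS, -NORMALIZE_WHITESPACE
+    # would produce {ELLIPSIS: True, NORMALIZE_WHITESPACE: False}, keyed by
+    # the corresponding flag constants from OPTIONFLAGS_BY_NAME.
+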
+    # This regular expression finds the indentation of every non-blank
+    # line in a string.
+    _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
+
+    def _min_indent(self, s):
+        "Return the minimum indentation of any non-blank line in `s`"
+        indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
+        if len(indents) > 0:
+            return min(indents)
+        else:
+            return 0
+
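+    # For illustration: _min_indent("  a\n    b\n") would return 2, since the
+    # least-indented non-blank line starts with two spaces.
+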
+    def _check_prompt_blank(self, lines, indent, name, lineno):
+        """
+        Given the lines of a source string (including prompts and
+        leading indentation), check to make sure that every prompt is
+        followed by a space character.  If any line is not followed by
+        a space character, then raise ValueError.
+        """
+        for i, line in enumerate(lines):
+            if len(line) >= indent+4 and line[indent+3] != ' ':
+                raise ValueError('line %r of the docstring for %s '
+                                 'lacks blank after %s: %r' %
+                                 (lineno+i+1, name,
+                                  line[indent:indent+3], line))
+
+    def _check_prefix(self, lines, prefix, name, lineno):
+        """
+        Check that every line in the given list starts with the given
+        prefix; if any line does not, then raise a ValueError.
+        """
+        for i, line in enumerate(lines):
+            if line and not line.startswith(prefix):
+                raise ValueError('line %r of the docstring for %s has '
+                                 'inconsistent leading whitespace: %r' %
+                                 (lineno+i+1, name, line))
+
+
+######################################################################
+## 4. DocTest Finder
+######################################################################
+
+class DocTestFinder:
+    """
+    A class used to extract the DocTests that are relevant to a given
+    object, from its docstring and the docstrings of its contained
+    objects.  Doctests can currently be extracted from the following
+    object types: modules, functions, classes, methods, staticmethods,
+    classmethods, and properties.
+    """
+
+    def __init__(self, verbose=False, parser=DocTestParser(),
+                 recurse=True, _namefilter=None, exclude_empty=True):
+        """
+        Create a new doctest finder.
+
+        The optional argument `parser` specifies a `DocTestParser` (or
+        drop-in replacement) that is used to extract doctest examples
+        from docstrings and collect them into `DocTest` objects.
+
+        If the optional argument `recurse` is false, then `find` will
+        only examine the given object, and not any contained objects.
+
+        If the optional argument `exclude_empty` is false, then `find`
+        will include tests for objects with empty docstrings.
+        """
+        self._parser = parser
+        self._verbose = verbose
+        self._recurse = recurse
+        self._exclude_empty = exclude_empty
+        # _namefilter is undocumented, and exists only for temporary backward-
+        # compatibility support of testmod's deprecated isprivate mess.
+        self._namefilter = _namefilter
+
+    def find(self, obj, name=None, module=None, globs=None,
+             extraglobs=None):
+        """
+        Return a list of the DocTests that are defined by the given
+        object's docstring, or by any of its contained objects'
+        docstrings.
+
+        The optional parameter `module` is the module that contains
+        the given object.  If the module is not specified or is None, then
+        the test finder will attempt to automatically determine the
+        correct module.  The object's module is used:
+
+            - As a default namespace, if `globs` is not specified.
+            - To prevent the DocTestFinder from extracting DocTests
+              from objects that are imported from other modules.
+            - To find the name of the file containing the object.
+            - To help find the line number of the object within its
+              file.
+
+        Contained objects whose module does not match `module` are ignored.
+
+        If `module` is False, no attempt to find the module will be made.
+        This is obscure, of use mostly in tests:  if `module` is False, or
+        is None but cannot be found automatically, then all objects are
+        considered to belong to the (non-existent) module, so all contained
+        objects will (recursively) be searched for doctests.
+
+        The globals for each DocTest are formed by combining `globs`
+        and `extraglobs` (bindings in `extraglobs` override bindings
+        in `globs`).  A new copy of the globals dictionary is created
+        for each DocTest.  If `globs` is not specified, then it
+        defaults to the module's `__dict__`, if specified, or {}
+        otherwise.  If `extraglobs` is not specified, then it defaults
+        to {}.
+
+        """
+        # If name was not specified, then extract it from the object.
+        if name is None:
+            name = getattr(obj, '__name__', None)
+            if name is None:
+                raise ValueError("DocTestFinder.find: name must be given "
+                        "when obj.__name__ doesn't exist: %r" %
+                                 (type(obj),))
+
+        # Find the module that contains the given object (if obj is
+        # a module, then module=obj).  Note: this may fail, in which
+        # case module will be None.
+        if module is False:
+            module = None
+        elif module is None:
+            module = inspect.getmodule(obj)
+
+        # Read the module's source code.  This is used by
+        # DocTestFinder._find_lineno to find the line number for a
+        # given object's docstring.
+        try:
+            file = inspect.getsourcefile(obj) or inspect.getfile(obj)
+            source_lines = linecache.getlines(file)
+            if not source_lines:
+                source_lines = None
+        except TypeError:
+            source_lines = None
+
+        # Initialize globals, and merge in extraglobs.
+        if globs is None:
+            if module is None:
+                globs = {}
+            else:
+                globs = module.__dict__.copy()
+        else:
+            globs = globs.copy()
+        if extraglobs is not None:
+            globs.update(extraglobs)
+
+        # Recursively explore `obj`, extracting DocTests.
+        tests = []
+        self._find(tests, obj, name, module, source_lines, globs, {})
+        return tests
+
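+    # Rough usage sketch (module name is only illustrative): something like
+    #     tests = DocTestFinder().find(some_module)
+    # returns a list of DocTest objects, one per reachable docstring (empty
+    # docstrings are skipped unless exclude_empty=False was passed).
+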
+    def _filter(self, obj, prefix, base):
+        """
+        Return true if the given object should not be examined.
+        """
+        return (self._namefilter is not None and
+                self._namefilter(prefix, base))
+
+    def _from_module(self, module, object):
+        """
+        Return true if the given object is defined in the given
+        module.
+        """
+        if module is None:
+            return True
+        elif inspect.isfunction(object):
+            return module.__dict__ is object.func_globals
+        elif inspect.isclass(object):
+            return module.__name__ == object.__module__
+        elif inspect.getmodule(object) is not None:
+            return module is inspect.getmodule(object)
+        elif hasattr(object, '__module__'):
+            return module.__name__ == object.__module__
+        elif isinstance(object, property):
+            return True # [XX] no way to be sure.
+        else:
+            raise ValueError("object must be a class or function")
+
+    def _find(self, tests, obj, name, module, source_lines, globs, seen):
+        """
+        Find tests for the given object and any contained objects, and
+        add them to `tests`.
+        """
+        if self._verbose:
+            print 'Finding tests in %s' % name
+
+        # If we've already processed this object, then ignore it.
+        if id(obj) in seen:
+            return
+        seen[id(obj)] = 1
+
+        # Find a test for this object, and add it to the list of tests.
+        test = self._get_test(obj, name, module, globs, source_lines)
+        if test is not None:
+            tests.append(test)
+
+        # Look for tests in a module's contained objects.
+        if inspect.ismodule(obj) and self._recurse:
+            for valname, val in obj.__dict__.items():
+                # Check if this contained object should be ignored.
+                if self._filter(val, name, valname):
+                    continue
+                valname = '%s.%s' % (name, valname)
+                # Recurse to functions & classes.
+                if ((inspect.isfunction(val) or inspect.isclass(val)) and
+                    self._from_module(module, val)):
+                    self._find(tests, val, valname, module, source_lines,
+                               globs, seen)
+
+        # Look for tests in a module's __test__ dictionary.
+        if inspect.ismodule(obj) and self._recurse:
+            for valname, val in getattr(obj, '__test__', {}).items():
+                if not isinstance(valname, basestring):
+                    raise ValueError("DocTestFinder.find: __test__ keys "
+                                     "must be strings: %r" %
+                                     (type(valname),))
+                if not (inspect.isfunction(val) or inspect.isclass(val) or
+                        inspect.ismethod(val) or inspect.ismodule(val) or
+                        isinstance(val, basestring)):
+                    raise ValueError("DocTestFinder.find: __test__ values "
+                                     "must be strings, functions, methods, "
+                                     "classes, or modules: %r" %
+                                     (type(val),))
+                valname = '%s.__test__.%s' % (name, valname)
+                self._find(tests, val, valname, module, source_lines,
+                           globs, seen)
+
+        # Look for tests in a class's contained objects.
+        if inspect.isclass(obj) and self._recurse:
+            for valname, val in obj.__dict__.items():
+                # Check if this contained object should be ignored.
+                if self._filter(val, name, valname):
+                    continue
+                # Special handling for staticmethod/classmethod.
+                if isinstance(val, staticmethod):
+                    val = getattr(obj, valname)
+                if isinstance(val, classmethod):
+                    val = getattr(obj, valname).im_func
+
+                # Recurse to methods, properties, and nested classes.
+                if ((inspect.isfunction(val) or inspect.isclass(val) or
+                      isinstance(val, property)) and
+                      self._from_module(module, val)):
+                    valname = '%s.%s' % (name, valname)
+                    self._find(tests, val, valname, module, source_lines,
+                               globs, seen)
+
+    def _get_test(self, obj, name, module, globs, source_lines):
+        """
+        Return a DocTest for the given object, if it defines a docstring;
+        otherwise, return None.
+        """
+        # Extract the object's docstring.  If it doesn't have one,
+        # then return None (no test for this object).
+        if isinstance(obj, basestring):
+            docstring = obj
+        else:
+            try:
+                if obj.__doc__ is None:
+                    docstring = ''
+                else:
+                    docstring = obj.__doc__
+                    if not isinstance(docstring, basestring):
+                        docstring = str(docstring)
+            except (TypeError, AttributeError):
+                docstring = ''
+
+        # Find the docstring's location in the file.
+        lineno = self._find_lineno(obj, source_lines)
+
+        # Don't bother if the docstring is empty.
+        if self._exclude_empty and not docstring:
+            return None
+
+        # Return a DocTest for this object.
+        if module is None:
+            filename = None
+        else:
+            filename = getattr(module, '__file__', module.__name__)
+            if filename[-4:] in (".pyc", ".pyo"):
+                filename = filename[:-1]
+        return self._parser.get_doctest(docstring, globs, name,
+                                        filename, lineno)
+
+    def _find_lineno(self, obj, source_lines):
+        """
+        Return a line number of the given object's docstring.  Note:
+        this method assumes that the object has a docstring.
+        """
+        lineno = None
+
+        # Find the line number for modules.
+        if inspect.ismodule(obj):
+            lineno = 0
+
+        # Find the line number for classes.
+        # Note: this could be fooled if a class is defined multiple
+        # times in a single file.
+        if inspect.isclass(obj):
+            if source_lines is None:
+                return None
+            pat = re.compile(r'^\s*class\s*%s\b' %
+                             getattr(obj, '__name__', '-'))
+            for i, line in enumerate(source_lines):
+                if pat.match(line):
+                    lineno = i
+                    break
+
+        # Find the line number for functions & methods.
+        if inspect.ismethod(obj): obj = obj.im_func
+        if inspect.isfunction(obj): obj = obj.func_code
+        if inspect.istraceback(obj): obj = obj.tb_frame
+        if inspect.isframe(obj): obj = obj.f_code
+        if inspect.iscode(obj):
+            lineno = getattr(obj, 'co_firstlineno', None)-1
+
+        # Find the line number where the docstring starts.  Assume
+        # that it's the first line that begins with a quote mark.
+        # Note: this could be fooled by a multiline function
+        # signature, where a continuation line begins with a quote
+        # mark.
+        if lineno is not None:
+            if source_lines is None:
+                return lineno+1
+            pat = re.compile('(^|.*:)\s*\w*("|\')')
+            for lineno in range(lineno, len(source_lines)):
+                if pat.match(source_lines[lineno]):
+                    return lineno
+
+        # We couldn't find the line number.
+        return None
+
+######################################################################
+## 5. DocTest Runner
+######################################################################
+
+class DocTestRunner:
+    """
+    A class used to run DocTest test cases, and accumulate statistics.
+    The `run` method is used to process a single DocTest case.  It
+    returns a tuple `(f, t)`, where `t` is the number of test cases
+    tried, and `f` is the number of test cases that failed.
+
+        >>> tests = DocTestFinder().find(_TestClass)
+        >>> runner = DocTestRunner(verbose=False)
+        >>> for test in tests:
+        ...     print runner.run(test)
+        (0, 2)
+        (0, 1)
+        (0, 2)
+        (0, 2)
+
+    The `summarize` method prints a summary of all the test cases that
+    have been run by the runner, and returns an aggregated `(f, t)`
+    tuple:
+
+        >>> runner.summarize(verbose=1)
+        4 items passed all tests:
+           2 tests in _TestClass
+           2 tests in _TestClass.__init__
+           2 tests in _TestClass.get
+           1 tests in _TestClass.square
+        7 tests in 4 items.
+        7 passed and 0 failed.
+        Test passed.
+        (0, 7)
+
+    The aggregated number of tried examples and failed examples is
+    also available via the `tries` and `failures` attributes:
+
+        >>> runner.tries
+        7
+        >>> runner.failures
+        0
+
+    The comparison between expected outputs and actual outputs is done
+    by an `OutputChecker`.  This comparison may be customized with a
+    number of option flags; see the documentation for `testmod` for
+    more information.  If the option flags are insufficient, then the
+    comparison may also be customized by passing a subclass of
+    `OutputChecker` to the constructor.
+
+    The test runner's display output can be controlled in two ways.
+    First, an output function (`out`) can be passed to
+    `DocTestRunner.run`; this function will be called with strings that
+    should be displayed.  It defaults to `sys.stdout.write`.  If
+    capturing the output is not sufficient, then the display output
+    can also be customized by subclassing DocTestRunner, and
+    overriding the methods `report_start`, `report_success`,
+    `report_unexpected_exception`, and `report_failure`.
+    """
+    # This divider string is used to separate failure messages, and to
+    # separate sections of the summary.
+    DIVIDER = "*" * 70
+
+    def __init__(self, checker=None, verbose=None, optionflags=0):
+        """
+        Create a new test runner.
+
+        Optional keyword arg `checker` is the `OutputChecker` that
+        should be used to compare the expected outputs and actual
+        outputs of doctest examples.
+
+        Optional keyword arg 'verbose' prints lots of stuff if true,
+        only failures if false; by default, it's true iff '-v' is in
+        sys.argv.
+
+        Optional argument `optionflags` can be used to control how the
+        test runner compares expected output to actual output, and how
+        it displays failures.  See the documentation for `testmod` for
+        more information.
+        """
+        self._checker = checker or OutputChecker()
+        if verbose is None:
+            verbose = '-v' in sys.argv
+        self._verbose = verbose
+        self.optionflags = optionflags
+        self.original_optionflags = optionflags
+
+        # Keep track of the examples we've run.
+        self.tries = 0
+        self.failures = 0
+        self._name2ft = {}
+
+        # Create a fake output target for capturing doctest output.
+        self._fakeout = _SpoofOut()
+
+    #/////////////////////////////////////////////////////////////////
+    # Reporting methods
+    #/////////////////////////////////////////////////////////////////
+
+    def report_start(self, out, test, example):
+        """
+        Report that the test runner is about to process the given
+        example.  (Only displays a message if verbose=True)
+        """
+        if self._verbose:
+            if example.want:
+                out('Trying:\n' + _indent(example.source) +
+                    'Expecting:\n' + _indent(example.want))
+            else:
+                out('Trying:\n' + _indent(example.source) +
+                    'Expecting nothing\n')
+
+    def report_success(self, out, test, example, got):
+        """
+        Report that the given example ran successfully.  (Only
+        displays a message if verbose=True)
+        """
+        if self._verbose:
+            out("ok\n")
+
+    def report_failure(self, out, test, example, got):
+        """
+        Report that the given example failed.
+        """
+        out(self._failure_header(test, example) +
+            self._checker.output_difference(example, got, self.optionflags))
+
+    def report_unexpected_exception(self, out, test, example, exc_info):
+        """
+        Report that the given example raised an unexpected exception.
+        """
+        out(self._failure_header(test, example) +
+            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
+
+    def _failure_header(self, test, example):
+        out = [self.DIVIDER]
+        if test.filename:
+            if test.lineno is not None and example.lineno is not None:
+                lineno = test.lineno + example.lineno + 1
+            else:
+                lineno = '?'
+            out.append('File "%s", line %s, in %s' %
+                       (test.filename, lineno, test.name))
+        else:
+            out.append('Line %s, in %s' % (example.lineno+1, test.name))
+        out.append('Failed example:')
+        source = example.source
+        out.append(_indent(source))
+        return '\n'.join(out)
+
+    #/////////////////////////////////////////////////////////////////
+    # DocTest Running
+    #/////////////////////////////////////////////////////////////////
+
+    def __run(self, test, compileflags, out):
+        """
+        Run the examples in `test`.  Write the outcome of each example
+        with one of the `DocTestRunner.report_*` methods, using the
+        writer function `out`.  `compileflags` is the set of compiler
+        flags that should be used to execute examples.  Return a tuple
+        `(f, t)`, where `t` is the number of examples tried, and `f`
+        is the number of examples that failed.  The examples are run
+        in the namespace `test.globs`.
+        """
+        # Keep track of the number of failures and tries.
+        failures = tries = 0
+
+        # Save the option flags (since option directives can be used
+        # to modify them).
+        original_optionflags = self.optionflags
+
+        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
+
+        check = self._checker.check_output
+
+        # Process each example.
+        for examplenum, example in enumerate(test.examples):
+
+            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
+            # reporting after the first failure.
+            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
+                     failures > 0)
+
+            # Merge in the example's options.
+            self.optionflags = original_optionflags
+            if example.options:
+                for (optionflag, val) in example.options.items():
+                    if val:
+                        self.optionflags |= optionflag
+                    else:
+                        self.optionflags &= ~optionflag
+
+            # Record that we started this example.
+            tries += 1
+            if not quiet:
+                self.report_start(out, test, example)
+
+            # Use a special filename for compile(), so we can retrieve
+            # the source code during interactive debugging (see
+            # __patched_linecache_getlines).
+            filename = '<doctest %s[%d]>' % (test.name, examplenum)
+
+            # Run the example in the given context (globs), and record
+            # any exception that gets raised.  (But don't intercept
+            # keyboard interrupts.)
+            try:
+                # Don't blink!  This is where the user's code gets run.
+                exec compile(example.source, filename, "single",
+                             compileflags, 1) in test.globs
+                self.debugger.set_continue() # ==== Example Finished ====
+                exception = None
+            except KeyboardInterrupt:
+                raise
+            except:
+                exception = sys.exc_info()
+                self.debugger.set_continue() # ==== Example Finished ====
+
+            got = self._fakeout.getvalue()  # the actual output
+            self._fakeout.truncate(0)
+            outcome = FAILURE   # guilty until proved innocent or insane
+
+            # If the example executed without raising any exceptions,
+            # verify its output.
+            if exception is None:
+                if check(example.want, got, self.optionflags):
+                    outcome = SUCCESS
+
+            # The example raised an exception:  check if it was expected.
+            else:
+                exc_info = sys.exc_info()
+                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
+                if not quiet:
+                    got += _exception_traceback(exc_info)
+
+                # If `example.exc_msg` is None, then we weren't expecting
+                # an exception.
+                if example.exc_msg is None:
+                    outcome = BOOM
+
+                # We expected an exception:  see whether it matches.
+                elif check(example.exc_msg, exc_msg, self.optionflags):
+                    outcome = SUCCESS
+
+                # Another chance if they didn't care about the detail.
+                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
+                    m1 = re.match(r'[^:]*:', example.exc_msg)
+                    m2 = re.match(r'[^:]*:', exc_msg)
+                    if m1 and m2 and check(m1.group(0), m2.group(0),
+                                           self.optionflags):
+                        outcome = SUCCESS
+
+            # Report the outcome.
+            if outcome is SUCCESS:
+                if not quiet:
+                    self.report_success(out, test, example, got)
+            elif outcome is FAILURE:
+                if not quiet:
+                    self.report_failure(out, test, example, got)
+                failures += 1
+            elif outcome is BOOM:
+                if not quiet:
+                    self.report_unexpected_exception(out, test, example,
+                                                     exc_info)
+                failures += 1
+            else:
+                assert False, ("unknown outcome", outcome)
+
+        # Restore the option flags (in case they were modified)
+        self.optionflags = original_optionflags
+
+        # Record and return the number of failures and tries.
+        self.__record_outcome(test, failures, tries)
+        return failures, tries
+
+    def __record_outcome(self, test, f, t):
+        """
+        Record the fact that the given DocTest (`test`) generated `f`
+        failures out of `t` tried examples.
+        """
+        f2, t2 = self._name2ft.get(test.name, (0,0))
+        self._name2ft[test.name] = (f+f2, t+t2)
+        self.failures += f
+        self.tries += t
+
+    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
+                                         r'(?P<name>[\w\.]+)'
+                                         r'\[(?P<examplenum>\d+)\]>$')
+    def __patched_linecache_getlines(self, filename, module_globals=None):
+        m = self.__LINECACHE_FILENAME_RE.match(filename)
+        if m and m.group('name') == self.test.name:
+            example = self.test.examples[int(m.group('examplenum'))]
+            return example.source.splitlines(True)
+        elif self.save_linecache_getlines.func_code.co_argcount>1:
+            return self.save_linecache_getlines(filename, module_globals)
+        else:
+            return self.save_linecache_getlines(filename)
+
+    def run(self, test, compileflags=None, out=None, clear_globs=True):
+        """
+        Run the examples in `test`, and display the results using the
+        writer function `out`.
+
+        The examples are run in the namespace `test.globs`.  If
+        `clear_globs` is true (the default), then this namespace will
+        be cleared after the test runs, to help with garbage
+        collection.  If you would like to examine the namespace after
+        the test completes, then use `clear_globs=False`.
+
+        `compileflags` gives the set of flags that should be used by
+        the Python compiler when running the examples.  If not
+        specified, then it will default to the set of future-import
+        flags that apply to `globs`.
+
+        The output of each example is checked using
+        `DocTestRunner.check_output`, and the results are formatted by
+        the `DocTestRunner.report_*` methods.
+        """
+        self.test = test
+
+        if compileflags is None:
+            compileflags = _extract_future_flags(test.globs)
+
+        save_stdout = sys.stdout
+        if out is None:
+            out = save_stdout.write
+        sys.stdout = self._fakeout
+
+        # Patch pdb.set_trace to restore sys.stdout during interactive
+        # debugging (so it's not still redirected to self._fakeout).
+        # Note that the interactive output will go to *our*
+        # save_stdout, even if that's not the real sys.stdout; this
+        # allows us to write test cases for the set_trace behavior.
+        save_set_trace = pdb.set_trace
+        self.debugger = _OutputRedirectingPdb(save_stdout)
+        self.debugger.reset()
+        pdb.set_trace = self.debugger.set_trace
+
+        # Patch linecache.getlines, so we can see the example's source
+        # when we're inside the debugger.
+        self.save_linecache_getlines = linecache.getlines
+        linecache.getlines = self.__patched_linecache_getlines
+
+        try:
+            return self.__run(test, compileflags, out)
+        finally:
+            sys.stdout = save_stdout
+            pdb.set_trace = save_set_trace
+            linecache.getlines = self.save_linecache_getlines
+            if clear_globs:
+                test.globs.clear()
+
+    #/////////////////////////////////////////////////////////////////
+    # Summarization
+    #/////////////////////////////////////////////////////////////////
+    def summarize(self, verbose=None):
+        """
+        Print a summary of all the test cases that have been run by
+        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
+        the total number of failed examples, and `t` is the total
+        number of tried examples.
+
+        The optional `verbose` argument controls how detailed the
+        summary is.  If the verbosity is not specified, then the
+        DocTestRunner's verbosity is used.
+        """
+        if verbose is None:
+            verbose = self._verbose
+        notests = []
+        passed = []
+        failed = []
+        totalt = totalf = 0
+        for x in self._name2ft.items():
+            name, (f, t) = x
+            assert f <= t
+            totalt += t
+            totalf += f
+            if t == 0:
+                notests.append(name)
+            elif f == 0:
+                passed.append( (name, t) )
+            else:
+                failed.append(x)
+        if verbose:
+            if notests:
+                print len(notests), "items had no tests:"
+                notests.sort()
+                for thing in notests:
+                    print "   ", thing
+            if passed:
+                print len(passed), "items passed all tests:"
+                passed.sort()
+                for thing, count in passed:
+                    print " %3d tests in %s" % (count, thing)
+        if failed:
+            print self.DIVIDER
+            print len(failed), "items had failures:"
+            failed.sort()
+            for thing, (f, t) in failed:
+                print " %3d of %3d in %s" % (f, t, thing)
+        if verbose:
+            print totalt, "tests in", len(self._name2ft), "items."
+            print totalt - totalf, "passed and", totalf, "failed."
+        if totalf:
+            print "***Test Failed***", totalf, "failures."
+        elif verbose:
+            print "Test passed."
+        return totalf, totalt
+
+    #/////////////////////////////////////////////////////////////////
+    # Backward compatibility cruft to maintain doctest.master.
+    #/////////////////////////////////////////////////////////////////
+    def merge(self, other):
+        d = self._name2ft
+        for name, (f, t) in other._name2ft.items():
+            if name in d:
+                print "*** DocTestRunner.merge: '" + name + "' in both" \
+                    " testers; summing outcomes."
+                f2, t2 = d[name]
+                f = f + f2
+                t = t + t2
+            d[name] = f, t
+
+class OutputChecker:
+    """
+    A class used to check whether the actual output from a doctest
+    example matches the expected output.  `OutputChecker` defines two
+    methods: `check_output`, which compares a given pair of outputs,
+    and returns true if they match; and `output_difference`, which
+    returns a string describing the differences between two outputs.
+    """
+    def check_output(self, want, got, optionflags):
+        """
+        Return True iff the actual output from an example (`got`)
+        matches the expected output (`want`).  These strings are
+        always considered to match if they are identical; but
+        depending on what option flags the test runner is using,
+        several non-exact match types are also possible.  See the
+        documentation for `DocTestRunner` for more information about
+        option flags.
+        """
+        # Handle the common case first, for efficiency:
+        # if they're string-identical, always return true.
+        if got == want:
+            return True
+
+        # The values True and False replaced 1 and 0 as the return
+        # value for boolean comparisons in Python 2.3.
+        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
+            if (got,want) == ("True\n", "1\n"):
+                return True
+            if (got,want) == ("False\n", "0\n"):
+                return True
+
+        # <BLANKLINE> can be used as a special sequence to signify a
+        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
+        if not (optionflags & DONT_ACCEPT_BLANKLINE):
+            # Replace <BLANKLINE> in want with a blank line.
+            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
+                          '', want)
+            # If a line in got contains only spaces, then remove the
+            # spaces.
+            got = re.sub('(?m)^\s*?$', '', got)
+            if got == want:
+                return True
+
+        # This flag causes doctest to ignore any differences in the
+        # contents of whitespace strings.  Note that this can be used
+        # in conjunction with the ELLIPSIS flag.
+        if optionflags & NORMALIZE_WHITESPACE:
+            got = ' '.join(got.split())
+            want = ' '.join(want.split())
+            if got == want:
+                return True
+
+        # The ELLIPSIS flag says to let the sequence "..." in `want`
+        # match any substring in `got`.
+        if optionflags & ELLIPSIS:
+            if _ellipsis_match(want, got):
+                return True
+
+        # We didn't find any match; return false.
+        return False
+
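+    # For illustration: with NORMALIZE_WHITESPACE set,
+    #     check_output('1 2 3\n', '1   2  3\n', NORMALIZE_WHITESPACE)
+    # would return True, and with ELLIPSIS set an expected "1 ... 5\n"
+    # would match an actual "1 2 3 4 5\n".
+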
+    # Should we do a fancy diff?
+    def _do_a_fancy_diff(self, want, got, optionflags):
+        # Not unless they asked for a fancy diff.
+        if not optionflags & (REPORT_UDIFF |
+                              REPORT_CDIFF |
+                              REPORT_NDIFF):
+            return False
+
+        # If expected output uses ellipsis, a meaningful fancy diff is
+        # too hard ... or maybe not.  In two real-life failures Tim saw,
+        # a diff was a major help anyway, so this is commented out.
+        # [todo] _ellipsis_match() knows which pieces do and don't match,
+        # and could be the basis for a kick-ass diff in this case.
+        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
+        ##    return False
+
+        # ndiff does intraline difference marking, so can be useful even
+        # for 1-line differences.
+        if optionflags & REPORT_NDIFF:
+            return True
+
+        # The other diff types need at least a few lines to be helpful.
+        return want.count('\n') > 2 and got.count('\n') > 2
+
+    def output_difference(self, example, got, optionflags):
+        """
+        Return a string describing the differences between the
+        expected output for a given example (`example`) and the actual
+        output (`got`).  `optionflags` is the set of option flags used
+        to compare `want` and `got`.
+        """
+        want = example.want
+        # If <BLANKLINE>s are being used, then replace blank lines
+        # with <BLANKLINE> in the actual output string.
+        if not (optionflags & DONT_ACCEPT_BLANKLINE):
+            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
+
+        # Check if we should use diff.
+        if self._do_a_fancy_diff(want, got, optionflags):
+            # Split want & got into lines.
+            want_lines = want.splitlines(True)  # True == keep line ends
+            got_lines = got.splitlines(True)
+            # Use difflib to find their differences.
+            if optionflags & REPORT_UDIFF:
+                diff = difflib.unified_diff(want_lines, got_lines, n=2)
+                diff = list(diff)[2:] # strip the diff header
+                kind = 'unified diff with -expected +actual'
+            elif optionflags & REPORT_CDIFF:
+                diff = difflib.context_diff(want_lines, got_lines, n=2)
+                diff = list(diff)[2:] # strip the diff header
+                kind = 'context diff with expected followed by actual'
+            elif optionflags & REPORT_NDIFF:
+                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
+                diff = list(engine.compare(want_lines, got_lines))
+                kind = 'ndiff with -expected +actual'
+            else:
+                assert 0, 'Bad diff option'
+            # Remove trailing whitespace on diff output.
+            diff = [line.rstrip() + '\n' for line in diff]
+            return 'Differences (%s):\n' % kind + _indent(''.join(diff))
+
+        # If we're not using diff, then simply list the expected
+        # output followed by the actual output.
+        if want and got:
+            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
+        elif want:
+            return 'Expected:\n%sGot nothing\n' % _indent(want)
+        elif got:
+            return 'Expected nothing\nGot:\n%s' % _indent(got)
+        else:
+            return 'Expected nothing\nGot nothing\n'
+
+class DocTestFailure(Exception):
+    """A DocTest example has failed in debugging mode.
+
+    The exception instance has variables:
+
+    - test: the DocTest object being run
+
+    - example: the Example object that failed
+
+    - got: the actual output
+    """
+    def __init__(self, test, example, got):
+        self.test = test
+        self.example = example
+        self.got = got
+
+    def __str__(self):
+        return str(self.test)
+
+class UnexpectedException(Exception):
+    """A DocTest example has encountered an unexpected exception
+
+    The exception instance has variables:
+
+    - test: the DocTest object being run
+
+    - example: the Example object that failed
+
+    - exc_info: the exception info
+    """
+    def __init__(self, test, example, exc_info):
+        self.test = test
+        self.example = example
+        self.exc_info = exc_info
+
+    def __str__(self):
+        return str(self.test)
+
+class DebugRunner(DocTestRunner):
+    r"""Run doc tests but raise an exception as soon as there is a failure.
+
+       If an unexpected exception occurs, an UnexpectedException is raised.
+       It contains the test, the example, and the original exception:
+
+         >>> runner = DebugRunner(verbose=False)
+         >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
+         ...                                    {}, 'foo', 'foo.py', 0)
+         >>> try:
+         ...     runner.run(test)
+         ... except UnexpectedException, failure:
+         ...     pass
+
+         >>> failure.test is test
+         True
+
+         >>> failure.example.want
+         '42\n'
+
+         >>> exc_info = failure.exc_info
+         >>> raise exc_info[0], exc_info[1], exc_info[2]
+         Traceback (most recent call last):
+         ...
+         KeyError
+
+       We wrap the original exception to give the calling application
+       access to the test and example information.
+
+       If the output doesn't match, then a DocTestFailure is raised:
+
+         >>> test = DocTestParser().get_doctest('''
+         ...      >>> x = 1
+         ...      >>> x
+         ...      2
+         ...      ''', {}, 'foo', 'foo.py', 0)
+
+         >>> try:
+         ...    runner.run(test)
+         ... except DocTestFailure, failure:
+         ...    pass
+
+       DocTestFailure objects provide access to the test:
+
+         >>> failure.test is test
+         True
+
+       As well as to the example:
+
+         >>> failure.example.want
+         '2\n'
+
+       and the actual output:
+
+         >>> failure.got
+         '1\n'
+
+       If a failure or error occurs, the globals are left intact:
+
+         >>> del test.globs['__builtins__']
+         >>> test.globs
+         {'x': 1}
+
+         >>> test = DocTestParser().get_doctest('''
+         ...      >>> x = 2
+         ...      >>> raise KeyError
+         ...      ''', {}, 'foo', 'foo.py', 0)
+
+         >>> runner.run(test)
+         Traceback (most recent call last):
+         ...
+         UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>
+
+         >>> del test.globs['__builtins__']
+         >>> test.globs
+         {'x': 2}
+
+       But the globals are cleared if there is no error:
+
+         >>> test = DocTestParser().get_doctest('''
+         ...      >>> x = 2
+         ...      ''', {}, 'foo', 'foo.py', 0)
+
+         >>> runner.run(test)
+         (0, 1)
+
+         >>> test.globs
+         {}
+
+       """
+
+    def run(self, test, compileflags=None, out=None, clear_globs=True):
+        r = DocTestRunner.run(self, test, compileflags, out, False)
+        if clear_globs:
+            test.globs.clear()
+        return r
+
+    def report_unexpected_exception(self, out, test, example, exc_info):
+        raise UnexpectedException(test, example, exc_info)
+
+    def report_failure(self, out, test, example, got):
+        raise DocTestFailure(test, example, got)
+
+######################################################################
+## 6. Test Functions
+######################################################################
+# These should be backwards compatible.
+
+# For backward compatibility, a global instance of a DocTestRunner
+# class, updated by testmod.
+master = None
+
+def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
+            report=True, optionflags=0, extraglobs=None,
+            raise_on_error=False, exclude_empty=False):
+    """m=None, name=None, globs=None, verbose=None, isprivate=None,
+       report=True, optionflags=0, extraglobs=None, raise_on_error=False,
+       exclude_empty=False
+
+    Test examples in docstrings in functions and classes reachable
+    from module m (or the current module if m is not supplied), starting
+    with m.__doc__.  Unless isprivate is specified, private names
+    are not skipped.
+
+    Also test examples reachable from dict m.__test__ if it exists and is
+    not None.  m.__test__ maps names to functions, classes and strings;
+    function and class docstrings are tested even if the name is private;
+    strings are tested directly, as if they were docstrings.
+
+    Return (#failures, #tests).
+
+    See doctest.__doc__ for an overview.
+
+    Optional keyword arg "name" gives the name of the module; by default
+    use m.__name__.
+
+    Optional keyword arg "globs" gives a dict to be used as the globals
+    when executing examples; by default, use m.__dict__.  A copy of this
+    dict is actually used for each docstring, so that each docstring's
+    examples start with a clean slate.
+
+    Optional keyword arg "extraglobs" gives a dictionary that should be
+    merged into the globals that are used to execute examples.  By
+    default, no extra globals are used.  This is new in 2.4.
+
+    Optional keyword arg "verbose" prints lots of stuff if true, prints
+    only failures if false; by default, it's true iff "-v" is in sys.argv.
+
+    Optional keyword arg "report" prints a summary at the end when true,
+    else prints nothing at the end.  In verbose mode, the summary is
+    detailed, else very brief (in fact, empty if all tests passed).
+
+    Optional keyword arg "optionflags" or's together module constants,
+    and defaults to 0.  This is new in 2.3.  Possible values (see the
+    docs for details):
+
+        DONT_ACCEPT_TRUE_FOR_1
+        DONT_ACCEPT_BLANKLINE
+        NORMALIZE_WHITESPACE
+        ELLIPSIS
+        IGNORE_EXCEPTION_DETAIL
+        REPORT_UDIFF
+        REPORT_CDIFF
+        REPORT_NDIFF
+        REPORT_ONLY_FIRST_FAILURE
+
+    Optional keyword arg "raise_on_error" raises an exception on the
+    first unexpected exception or failure. This allows failures to be
+    post-mortem debugged.
+
+    Deprecated in Python 2.4:
+    Optional keyword arg "isprivate" specifies a function used to
+    determine whether a name is private.  The default function treats
+    all functions as public.  Optionally, "isprivate" can be
+    set to doctest.is_private to skip over functions marked as private
+    using the underscore naming convention; see its docs for details.
+
+    Advanced tomfoolery:  testmod runs methods of a local instance of
+    class doctest.Tester, then merges the results into (or creates)
+    global Tester instance doctest.master.  Methods of doctest.master
+    can be called directly too, if you want to do something unusual.
+    Passing report=0 to testmod is especially useful then, to delay
+    displaying a summary.  Invoke doctest.master.summarize(verbose)
+    when you're done fiddling.
+    """
+    global master
+
+    if isprivate is not None:
+        warnings.warn("the isprivate argument is deprecated; "
+                      "examine DocTestFinder.find() lists instead",
+                      DeprecationWarning)
+
+    # If no module was given, then use __main__.
+    if m is None:
+        # DWA - m will still be None if this wasn't invoked from the command
+        # line, in which case the following TypeError is about as good an error
+        # as we should expect
+        m = sys.modules.get('__main__')
+
+    # Check that we were actually given a module.
+    if not inspect.ismodule(m):
+        raise TypeError("testmod: module required; %r" % (m,))
+
+    # If no name was given, then use the module's name.
+    if name is None:
+        name = m.__name__
+
+    # Find, parse, and run all tests in the given module.
+    finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
+
+    if raise_on_error:
+        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
+    else:
+        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+
+    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
+        runner.run(test)
+
+    if report:
+        runner.summarize()
+
+    if master is None:
+        master = runner
+    else:
+        master.merge(runner)
+
+    return runner.failures, runner.tries
+
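+# Rough usage sketch (illustrative only): a module can run its own examples
+# with something like
+#     if __name__ == '__main__':
+#         import doctest
+#         doctest.testmod()
+# which returns (#failures, #tests) and prints a summary when report is true.
+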
+def testfile(filename, module_relative=True, name=None, package=None,
+             globs=None, verbose=None, report=True, optionflags=0,
+             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
+    """
+    Test examples in the given file.  Return (#failures, #tests).
+
+    Optional keyword arg "module_relative" specifies how filenames
+    should be interpreted:
+
+      - If "module_relative" is True (the default), then "filename"
+         specifies a module-relative path.  By default, this path is
+         relative to the calling module's directory; but if the
+         "package" argument is specified, then it is relative to that
+         package.  To ensure os-independence, "filename" should use
+         "/" characters to separate path segments, and should not
+         be an absolute path (i.e., it may not begin with "/").
+
+      - If "module_relative" is False, then "filename" specifies an
+        os-specific path.  The path may be absolute or relative (to
+        the current working directory).
+
+    Optional keyword arg "name" gives the name of the test; by default
+    use the file's basename.
+
+    Optional keyword argument "package" is a Python package or the
+    name of a Python package whose directory should be used as the
+    base directory for a module relative filename.  If no package is
+    specified, then the calling module's directory is used as the base
+    directory for module relative filenames.  It is an error to
+    specify "package" if "module_relative" is False.
+
+    Optional keyword arg "globs" gives a dict to be used as the globals
+    when executing examples; by default, use {}.  A copy of this dict
+    is actually used for each docstring, so that each docstring's
+    examples start with a clean slate.
+
+    Optional keyword arg "extraglobs" gives a dictionary that should be
+    merged into the globals that are used to execute examples.  By
+    default, no extra globals are used.
+
+    Optional keyword arg "verbose" prints lots of stuff if true, prints
+    only failures if false; by default, it's true iff "-v" is in sys.argv.
+
+    Optional keyword arg "report" prints a summary at the end when true,
+    else prints nothing at the end.  In verbose mode, the summary is
+    detailed, else very brief (in fact, empty if all tests passed).
+
+    Optional keyword arg "optionflags" or's together module constants,
+    and defaults to 0.  Possible values (see the docs for details):
+
+        DONT_ACCEPT_TRUE_FOR_1
+        DONT_ACCEPT_BLANKLINE
+        NORMALIZE_WHITESPACE
+        ELLIPSIS
+        IGNORE_EXCEPTION_DETAIL
+        REPORT_UDIFF
+        REPORT_CDIFF
+        REPORT_NDIFF
+        REPORT_ONLY_FIRST_FAILURE
+
+    Optional keyword arg "raise_on_error" raises an exception on the
+    first unexpected exception or failure. This allows failures to be
+    post-mortem debugged.
+
+    Optional keyword arg "parser" specifies a DocTestParser (or
+    subclass) that should be used to extract tests from the files.
+
+    Advanced tomfoolery:  testfile runs methods of a local instance of
+    class doctest.Tester, then merges the results into (or creates)
+    global Tester instance doctest.master.  Methods of doctest.master
+    can be called directly too, if you want to do something unusual.
+    Passing report=0 to testfile is especially useful then, to delay
+    displaying a summary.  Invoke doctest.master.summarize(verbose)
+    when you're done fiddling.
+    """
+    global master
+
+    if package and not module_relative:
+        raise ValueError("Package may only be specified for module-"
+                         "relative paths.")
+
+    # Relativize the path
+    if module_relative:
+        package = _normalize_module(package)
+        filename = _module_relative_path(package, filename)
+
+    # If no name was given, then use the file's name.
+    if name is None:
+        name = os.path.basename(filename)
+
+    # Assemble the globals.
+    if globs is None:
+        globs = {}
+    else:
+        globs = globs.copy()
+    if extraglobs is not None:
+        globs.update(extraglobs)
+
+    if raise_on_error:
+        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
+    else:
+        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+
+    # Read the file, convert it to a test, and run it.
+    f = open(filename)
+    s = f.read()
+    f.close()
+    test = parser.get_doctest(s, globs, name, filename, 0)
+    runner.run(test)
+
+    if report:
+        runner.summarize()
+
+    if master is None:
+        master = runner
+    else:
+        master.merge(runner)
+
+    return runner.failures, runner.tries
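+
+# A minimal usage sketch: run the examples in a module-relative text file and
+# report how many failed ("example.txt" is a placeholder file name).
+def _example_testfile_usage():
+    failures, tests = testfile('example.txt',
+                               optionflags=ELLIPSIS | NORMALIZE_WHITESPACE)
+    if failures:
+        print "%d of %d examples failed" % (failures, tests)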
+
+def run_docstring_examples(f, globs, verbose=False, name="NoName",
+                           compileflags=None, optionflags=0):
+    """
+    Test examples in the given object's docstring (`f`), using `globs`
+    as globals.  Optional argument `name` is used in failure messages.
+    If the optional argument `verbose` is true, then generate output
+    even if there are no failures.
+
+    `compileflags` gives the set of flags that should be used by the
+    Python compiler when running the examples.  If not specified, then
+    it will default to the set of future-import flags that apply to
+    `globs`.
+
+    Optional keyword arg `optionflags` specifies options for the
+    testing and output.  See the documentation for `testmod` for more
+    information.
+    """
+    # Find, parse, and run all tests in the given module.
+    finder = DocTestFinder(verbose=verbose, recurse=False)
+    runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+    for test in finder.find(f, name, globs=globs):
+        runner.run(test, compileflags=compileflags)
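+
+# A minimal usage sketch: check the examples in a single object's docstring
+# (the `add` function below is illustrative only).
+def _example_run_docstring_examples():
+    def add(a, b):
+        """
+        >>> add(2, 2)
+        4
+        """
+        return a + b
+    run_docstring_examples(add, {'add': add}, name='add')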
+
+######################################################################
+## 7. Tester
+######################################################################
+# This is provided only for backwards compatibility.  It's not
+# actually used in any way.
+
+class Tester:
+    def __init__(self, mod=None, globs=None, verbose=None,
+                 isprivate=None, optionflags=0):
+
+        warnings.warn("class Tester is deprecated; "
+                      "use class doctest.DocTestRunner instead",
+                      DeprecationWarning, stacklevel=2)
+        if mod is None and globs is None:
+            raise TypeError("Tester.__init__: must specify mod or globs")
+        if mod is not None and not inspect.ismodule(mod):
+            raise TypeError("Tester.__init__: mod must be a module; %r" %
+                            (mod,))
+        if globs is None:
+            globs = mod.__dict__
+        self.globs = globs
+
+        self.verbose = verbose
+        self.isprivate = isprivate
+        self.optionflags = optionflags
+        self.testfinder = DocTestFinder(_namefilter=isprivate)
+        self.testrunner = DocTestRunner(verbose=verbose,
+                                        optionflags=optionflags)
+
+    def runstring(self, s, name):
+        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
+        if self.verbose:
+            print "Running string", name
+        (f,t) = self.testrunner.run(test)
+        if self.verbose:
+            print f, "of", t, "examples failed in string", name
+        return (f,t)
+
+    def rundoc(self, object, name=None, module=None):
+        f = t = 0
+        tests = self.testfinder.find(object, name, module=module,
+                                     globs=self.globs)
+        for test in tests:
+            (f2, t2) = self.testrunner.run(test)
+            (f,t) = (f+f2, t+t2)
+        return (f,t)
+
+    def rundict(self, d, name, module=None):
+        import types
+        m = types.ModuleType(name)
+        m.__dict__.update(d)
+        if module is None:
+            module = False
+        return self.rundoc(m, name, module)
+
+    def run__test__(self, d, name):
+        import types
+        m = types.ModuleType(name)
+        m.__test__ = d
+        return self.rundoc(m, name)
+
+    def summarize(self, verbose=None):
+        return self.testrunner.summarize(verbose)
+
+    def merge(self, other):
+        self.testrunner.merge(other.testrunner)
+
+######################################################################
+## 8. Unittest Support
+######################################################################
+
+_unittest_reportflags = 0
+
+def set_unittest_reportflags(flags):
+    """Sets the unittest option flags.
+
+    The old flag is returned so that a runner could restore the old
+    value if it wished to:
+
+      >>> old = _unittest_reportflags
+      >>> set_unittest_reportflags(REPORT_NDIFF |
+      ...                          REPORT_ONLY_FIRST_FAILURE) == old
+      True
+
+      >>> import doctest
+      >>> doctest._unittest_reportflags == (REPORT_NDIFF |
+      ...                                   REPORT_ONLY_FIRST_FAILURE)
+      True
+
+    Only reporting flags can be set:
+
+      >>> set_unittest_reportflags(ELLIPSIS)
+      Traceback (most recent call last):
+      ...
+      ValueError: ('Only reporting flags allowed', 8)
+
+      >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
+      ...                                   REPORT_ONLY_FIRST_FAILURE)
+      True
+    """
+    global _unittest_reportflags
+
+    if (flags & REPORTING_FLAGS) != flags:
+        raise ValueError("Only reporting flags allowed", flags)
+    old = _unittest_reportflags
+    _unittest_reportflags = flags
+    return old
+
+
+class DocTestCase(unittest.TestCase):
+
+    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
+                 checker=None):
+
+        unittest.TestCase.__init__(self)
+        self._dt_optionflags = optionflags
+        self._dt_checker = checker
+        self._dt_test = test
+        self._dt_setUp = setUp
+        self._dt_tearDown = tearDown
+
+    def setUp(self):
+        test = self._dt_test
+
+        if self._dt_setUp is not None:
+            self._dt_setUp(test)
+
+    def tearDown(self):
+        test = self._dt_test
+
+        if self._dt_tearDown is not None:
+            self._dt_tearDown(test)
+
+        test.globs.clear()
+
+    def runTest(self):
+        test = self._dt_test
+        old = sys.stdout
+        new = StringIO()
+        optionflags = self._dt_optionflags
+
+        if not (optionflags & REPORTING_FLAGS):
+            # The option flags don't include any reporting flags,
+            # so add the default reporting flags
+            optionflags |= _unittest_reportflags
+
+        runner = DocTestRunner(optionflags=optionflags,
+                               checker=self._dt_checker, verbose=False)
+
+        try:
+            runner.DIVIDER = "-"*70
+            failures, tries = runner.run(
+                test, out=new.write, clear_globs=False)
+        finally:
+            sys.stdout = old
+
+        if failures:
+            raise self.failureException(self.format_failure(new.getvalue()))
+
+    def format_failure(self, err):
+        test = self._dt_test
+        if test.lineno is None:
+            lineno = 'unknown line number'
+        else:
+            lineno = '%s' % test.lineno
+        lname = '.'.join(test.name.split('.')[-1:])
+        return ('Failed doctest test for %s\n'
+                '  File "%s", line %s, in %s\n\n%s'
+                % (test.name, test.filename, lineno, lname, err)
+                )
+
+    def debug(self):
+        r"""Run the test case without results and without catching exceptions
+
+           The unit test framework includes a debug method on test cases
+           and test suites to support post-mortem debugging.  The test code
+           is run in such a way that errors are not caught.  This way a
+           caller can catch the errors and initiate post-mortem debugging.
+
+           The DocTestCase provides a debug method that raises
+           UnexpectedException errors if there is an unexpected
+           exception:
+
+             >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
+             ...                {}, 'foo', 'foo.py', 0)
+             >>> case = DocTestCase(test)
+             >>> try:
+             ...     case.debug()
+             ... except UnexpectedException, failure:
+             ...     pass
+
+           The UnexpectedException contains the test, the example, and
+           the original exception:
+
+             >>> failure.test is test
+             True
+
+             >>> failure.example.want
+             '42\n'
+
+             >>> exc_info = failure.exc_info
+             >>> raise exc_info[0], exc_info[1], exc_info[2]
+             Traceback (most recent call last):
+             ...
+             KeyError
+
+           If the output doesn't match, then a DocTestFailure is raised:
+
+             >>> test = DocTestParser().get_doctest('''
+             ...      >>> x = 1
+             ...      >>> x
+             ...      2
+             ...      ''', {}, 'foo', 'foo.py', 0)
+             >>> case = DocTestCase(test)
+
+             >>> try:
+             ...    case.debug()
+             ... except DocTestFailure, failure:
+             ...    pass
+
+           DocTestFailure objects provide access to the test:
+
+             >>> failure.test is test
+             True
+
+           As well as to the example:
+
+             >>> failure.example.want
+             '2\n'
+
+           and the actual output:
+
+             >>> failure.got
+             '1\n'
+
+           """
+
+        self.setUp()
+        runner = DebugRunner(optionflags=self._dt_optionflags,
+                             checker=self._dt_checker, verbose=False)
+        runner.run(self._dt_test)
+        self.tearDown()
+
+    def id(self):
+        return self._dt_test.name
+
+    def __repr__(self):
+        name = self._dt_test.name.split('.')
+        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
+
+    __str__ = __repr__
+
+    def shortDescription(self):
+        return "Doctest: " + self._dt_test.name
+
+def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
+                 **options):
+    """
+    Convert doctest tests for a module to a unittest test suite.
+
+    This converts each documentation string in a module that
+    contains doctest tests to a unittest test case.  If any of the
+    tests in a doc string fail, then the test case fails.  An exception
+    is raised showing the name of the file containing the test and a
+    (sometimes approximate) line number.
+
+    The `module` argument provides the module to be tested.  The argument
+    can be either a module or a module name.
+
+    If no argument is given, the calling module is used.
+
+    A number of options may be provided as keyword arguments:
+
+    setUp
+      A set-up function.  This is called before running the
+      tests in each file. The setUp function will be passed a DocTest
+      object.  The setUp function can access the test globals as the
+      globs attribute of the test passed.
+
+    tearDown
+      A tear-down function.  This is called after running the
+      tests in each file.  The tearDown function will be passed a DocTest
+      object.  The tearDown function can access the test globals as the
+      globs attribute of the test passed.
+
+    globs
+      A dictionary containing initial global variables for the tests.
+
+    optionflags
+       A set of doctest option flags expressed as an integer.
+    """
+
+    if test_finder is None:
+        test_finder = DocTestFinder()
+
+    module = _normalize_module(module)
+    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
+    if globs is None:
+        globs = module.__dict__
+    if not tests:
+        # Why do we want to do this? Because it reveals a bug that might
+        # otherwise be hidden.
+        raise ValueError(module, "has no tests")
+
+    tests.sort()
+    suite = unittest.TestSuite()
+    for test in tests:
+        if len(test.examples) == 0:
+            continue
+        if not test.filename:
+            filename = module.__file__
+            if filename[-4:] in (".pyc", ".pyo"):
+                filename = filename[:-1]
+            test.filename = filename
+        suite.addTest(DocTestCase(test, **options))
+
+    return suite
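+
+# A minimal usage sketch: run a module's doctests through unittest
+# ("mypackage.mymodule" is a placeholder module name).
+def _example_doctest_suite():
+    suite = DocTestSuite('mypackage.mymodule',
+                         optionflags=ELLIPSIS | REPORT_ONLY_FIRST_FAILURE)
+    unittest.TextTestRunner(verbosity=1).run(suite)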
+
+class DocFileCase(DocTestCase):
+
+    def id(self):
+        return '_'.join(self._dt_test.name.split('.'))
+
+    def __repr__(self):
+        return self._dt_test.filename
+    __str__ = __repr__
+
+    def format_failure(self, err):
+        return ('Failed doctest test for %s\n  File "%s", line 0\n\n%s'
+                % (self._dt_test.name, self._dt_test.filename, err)
+                )
+
+def DocFileTest(path, module_relative=True, package=None,
+                globs=None, parser=DocTestParser(), **options):
+    if globs is None:
+        globs = {}
+
+    if package and not module_relative:
+        raise ValueError("Package may only be specified for module-"
+                         "relative paths.")
+
+    # Relativize the path.
+    if module_relative:
+        package = _normalize_module(package)
+        path = _module_relative_path(package, path)
+
+    # Find the file and read it.
+    name = os.path.basename(path)
+    f = open(path)
+    doc = f.read()
+    f.close()
+
+    # Convert it to a test, and wrap it in a DocFileCase.
+    test = parser.get_doctest(doc, globs, name, path, 0)
+    return DocFileCase(test, **options)
+
+def DocFileSuite(*paths, **kw):
+    """A unittest suite for one or more doctest files.
+
+    The path to each doctest file is given as a string; the
+    interpretation of that string depends on the keyword argument
+    "module_relative".
+
+    A number of options may be provided as keyword arguments:
+
+    module_relative
+      If "module_relative" is True, then the given file paths are
+      interpreted as os-independent module-relative paths.  By
+      default, these paths are relative to the calling module's
+      directory; but if the "package" argument is specified, then
+      they are relative to that package.  To ensure os-independence,
+      "filename" should use "/" characters to separate path
+      segments, and may not be an absolute path (i.e., it may not
+      begin with "/").
+
+      If "module_relative" is False, then the given file paths are
+      interpreted as os-specific paths.  These paths may be absolute
+      or relative (to the current working directory).
+
+    package
+      A Python package or the name of a Python package whose directory
+      should be used as the base directory for module relative paths.
+      If "package" is not specified, then the calling module's
+      directory is used as the base directory for module relative
+      filenames.  It is an error to specify "package" if
+      "module_relative" is False.
+
+    setUp
+      A set-up function.  This is called before running the
+      tests in each file. The setUp function will be passed a DocTest
+      object.  The setUp function can access the test globals as the
+      globs attribute of the test passed.
+
+    tearDown
+      A tear-down function.  This is called after running the
+      tests in each file.  The tearDown function will be passed a DocTest
+      object.  The tearDown function can access the test globals as the
+      globs attribute of the test passed.
+
+    globs
+      A dictionary containing initial global variables for the tests.
+
+    optionflags
+      A set of doctest option flags expressed as an integer.
+
+    parser
+      A DocTestParser (or subclass) that should be used to extract
+      tests from the files.
+    """
+    suite = unittest.TestSuite()
+
+    # We do this here so that _normalize_module is called at the right
+    # level.  If it were called in DocFileTest, then this function
+    # would be the caller and we might guess the package incorrectly.
+    if kw.get('module_relative', True):
+        kw['package'] = _normalize_module(kw.get('package'))
+
+    for path in paths:
+        suite.addTest(DocFileTest(path, **kw))
+
+    return suite
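+
+# A minimal usage sketch: collect doctest text files shipped alongside the
+# calling module into a single suite (the file names are placeholders).
+def _example_doc_file_suite():
+    return DocFileSuite('docs/usage.txt', 'docs/api.txt',
+                        optionflags=NORMALIZE_WHITESPACE,
+                        globs={'answer': 42})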
+
+######################################################################
+## 9. Debugging Support
+######################################################################
+
+def script_from_examples(s):
+    r"""Extract script from text with examples.
+
+       Converts text with examples to a Python script.  Example input is
+       converted to regular code.  Example output and all other words
+       are converted to comments:
+
+       >>> text = '''
+       ...       Here are examples of simple math.
+       ...
+       ...           Python has super accurate integer addition
+       ...
+       ...           >>> 2 + 2
+       ...           5
+       ...
+       ...           And very friendly error messages:
+       ...
+       ...           >>> 1/0
+       ...           To Infinity
+       ...           And
+       ...           Beyond
+       ...
+       ...           You can use logic if you want:
+       ...
+       ...           >>> if 0:
+       ...           ...    blah
+       ...           ...    blah
+       ...           ...
+       ...
+       ...           Ho hum
+       ...           '''
+
+       >>> print script_from_examples(text)
+       # Here are examples of simple math.
+       #
+       #     Python has super accurate integer addition
+       #
+       2 + 2
+       # Expected:
+       ## 5
+       #
+       #     And very friendly error messages:
+       #
+       1/0
+       # Expected:
+       ## To Infinity
+       ## And
+       ## Beyond
+       #
+       #     You can use logic if you want:
+       #
+       if 0:
+          blah
+          blah
+       #
+       #     Ho hum
+       """
+    output = []
+    for piece in DocTestParser().parse(s):
+        if isinstance(piece, Example):
+            # Add the example's source code (strip trailing NL)
+            output.append(piece.source[:-1])
+            # Add the expected output:
+            want = piece.want
+            if want:
+                output.append('# Expected:')
+                output += ['## '+l for l in want.split('\n')[:-1]]
+        else:
+            # Add non-example text.
+            output += [_comment_line(l)
+                       for l in piece.split('\n')[:-1]]
+
+    # Trim junk on both ends.
+    while output and output[-1] == '#':
+        output.pop()
+    while output and output[0] == '#':
+        output.pop(0)
+    # Combine the output, and return it.
+    return '\n'.join(output)
+
+def testsource(module, name):
+    """Extract the test sources from a doctest docstring as a script.
+
+    Provide the module (or dotted name of the module) containing the
+    test to be debugged and the name (within the module) of the object
+    with the doc string with tests to be debugged.
+    """
+    module = _normalize_module(module)
+    tests = DocTestFinder().find(module)
+    test = [t for t in tests if t.name == name]
+    if not test:
+        raise ValueError(name, "not found in tests")
+    test = test[0]
+    testsrc = script_from_examples(test.docstring)
+    return testsrc
+
+def debug_src(src, pm=False, globs=None):
+    """Debug a single doctest docstring, in argument `src`'"""
+    testsrc = script_from_examples(src)
+    debug_script(testsrc, pm, globs)
+
+def debug_script(src, pm=False, globs=None):
+    "Debug a test script.  `src` is the script, as a string."
+    import pdb
+
+    # Note that tempfile.NamedTemporaryFile() cannot be used.  As the
+    # docs say, a file so created cannot be opened by name a second time
+    # on modern Windows boxes, and execfile() needs to open it.
+    srcfilename = tempfile.mktemp(".py", "doctestdebug")
+    f = open(srcfilename, 'w')
+    f.write(src)
+    f.close()
+
+    try:
+        if globs:
+            globs = globs.copy()
+        else:
+            globs = {}
+
+        if pm:
+            try:
+                execfile(srcfilename, globs, globs)
+            except:
+                print sys.exc_info()[1]
+                pdb.post_mortem(sys.exc_info()[2])
+        else:
+            # Note that %r is vital here.  '%s' instead can, e.g., cause
+            # backslashes to get treated as metacharacters on Windows.
+            pdb.run("execfile(%r)" % srcfilename, globs, globs)
+
+    finally:
+        os.remove(srcfilename)
+
+def debug(module, name, pm=False):
+    """Debug a single doctest docstring.
+
+    Provide the module (or dotted name of the module) containing the
+    test to be debugged and the name (within the module) of the object
+    with the docstring with tests to be debugged.
+    """
+    module = _normalize_module(module)
+    testsrc = testsource(module, name)
+    debug_script(testsrc, pm, module.__dict__)
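+
+# A minimal sketch of post-mortem debugging a failing doctest (the module and
+# object names below are placeholders):
+def _example_debug_usage():
+    # drop into pdb on the first failing or error-raising example
+    debug('mypkg.mymod', 'mypkg.mymod.MyClass.method', pm=True)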
+
+######################################################################
+## 10. Example Usage
+######################################################################
+class _TestClass:
+    """
+    A pointless class, for sanity-checking of docstring testing.
+
+    Methods:
+        square()
+        get()
+
+    >>> _TestClass(13).get() + _TestClass(-12).get()
+    1
+    >>> hex(_TestClass(13).square().get())
+    '0xa9'
+    """
+
+    def __init__(self, val):
+        """val -> _TestClass object with associated value val.
+
+        >>> t = _TestClass(123)
+        >>> print t.get()
+        123
+        """
+
+        self.val = val
+
+    def square(self):
+        """square() -> square TestClass's associated value
+
+        >>> _TestClass(13).square().get()
+        169
+        """
+
+        self.val = self.val ** 2
+        return self
+
+    def get(self):
+        """get() -> return TestClass's associated value.
+
+        >>> x = _TestClass(-42)
+        >>> print x.get()
+        -42
+        """
+
+        return self.val
+
+__test__ = {"_TestClass": _TestClass,
+            "string": r"""
+                      Example of a string object, searched as-is.
+                      >>> x = 1; y = 2
+                      >>> x + y, x * y
+                      (3, 2)
+                      """,
+
+            "bool-int equivalence": r"""
+                                    In 2.2, boolean expressions displayed
+                                    0 or 1.  By default, we still accept
+                                    them.  This can be disabled by passing
+                                    DONT_ACCEPT_TRUE_FOR_1 to the new
+                                    optionflags argument.
+                                    >>> 4 == 4
+                                    1
+                                    >>> 4 == 4
+                                    True
+                                    >>> 4 > 4
+                                    0
+                                    >>> 4 > 4
+                                    False
+                                    """,
+
+            "blank lines": r"""
+                Blank lines can be marked with <BLANKLINE>:
+                    >>> print 'foo\n\nbar\n'
+                    foo
+                    <BLANKLINE>
+                    bar
+                    <BLANKLINE>
+            """,
+
+            "ellipsis": r"""
+                If the ellipsis flag is used, then '...' can be used to
+                elide substrings in the desired output:
+                    >>> print range(1000) #doctest: +ELLIPSIS
+                    [0, 1, 2, ..., 999]
+            """,
+
+            "whitespace normalization": r"""
+                If the whitespace normalization flag is used, then
+                differences in whitespace are ignored.
+                    >>> print range(30) #doctest: +NORMALIZE_WHITESPACE
+                    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+                     15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+                     27, 28, 29]
+            """,
+           }
+
+def _test():
+    r = unittest.TextTestRunner()
+    r.run(DocTestSuite())
+
+if __name__ == "__main__":
+    _test()
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/indexes/test_links_priority/external.html b/vendor/distribute-0.6.35/setuptools/tests/indexes/test_links_priority/external.html
new file mode 100644
index 0000000000000000000000000000000000000000..92e4702f634dfb37a404bec3103b76f6afcaa917
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/indexes/test_links_priority/external.html
@@ -0,0 +1,3 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
+</body></html>
diff --git a/vendor/distribute-0.6.35/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/vendor/distribute-0.6.35/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
new file mode 100644
index 0000000000000000000000000000000000000000..fefb028bd3ee7d45a414d6e96a7b2a21ffd7eda7
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
@@ -0,0 +1,4 @@
+<html><body>
+<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
+<a href="../../external.html" rel="homepage">external homepage</a><br/>
+</body></html>
diff --git a/vendor/distribute-0.6.35/setuptools/tests/py26compat.py b/vendor/distribute-0.6.35/setuptools/tests/py26compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4fb891af676421a8eb4be1227c8b3f3d044c01f
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/py26compat.py
@@ -0,0 +1,14 @@
+import unittest
+
+try:
+	# provide skipIf for Python 2.4-2.6
+	skipIf = unittest.skipIf
+except AttributeError:
+	def skipIf(condition, reason):
+		def skipper(func):
+			def skip(*args, **kwargs):
+				return
+			if condition:
+				return skip
+			return func
+		return skipper
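+
+# A minimal usage sketch, mirroring how the test modules apply this shim
+# (the test method below is hypothetical):
+#
+#     from setuptools.tests.py26compat import skipIf
+#
+#     @skipIf(sys.version_info < (2, 6), "requires Python 2.6 or later")
+#     def test_something(self):
+#         ...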
diff --git a/vendor/distribute-0.6.35/setuptools/tests/server.py b/vendor/distribute-0.6.35/setuptools/tests/server.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2ab7acc7cc195633522f545ad0b5437c8162654
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/server.py
@@ -0,0 +1,82 @@
+"""Basic http server for tests to simulate PyPI or custom indexes
+"""
+import urllib2
+import sys
+import time
+import threading
+import BaseHTTPServer
+from BaseHTTPServer import HTTPServer
+from SimpleHTTPServer import SimpleHTTPRequestHandler
+
+class IndexServer(HTTPServer):
+    """Basic single-threaded http server simulating a package index
+
+    You can use this server in unittest like this::
+        s = IndexServer()
+        s.start()
+        index_url = s.base_url() + 'mytestindex'
+        # do some test requests to the index
+        # The index files should be located in setuptools/tests/indexes
+        s.stop()
+    """
+    def __init__(self, server_address=('', 0),
+            RequestHandlerClass=SimpleHTTPRequestHandler):
+        HTTPServer.__init__(self, server_address, RequestHandlerClass)
+        self._run = True
+
+    def serve(self):
+        while self._run:
+            self.handle_request()
+
+    def start(self):
+        self.thread = threading.Thread(target=self.serve)
+        self.thread.start()
+
+    def stop(self):
+        "Stop the server"
+
+        # Let the server finish the last request and wait for a new one.
+        time.sleep(0.1)
+
+        # self.shutdown is not supported on python < 2.6, so just
+        #  set _run to false, and make a request, causing it to
+        #  terminate.
+        self._run = False
+        url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
+        try:
+            if sys.version_info >= (2, 6):
+                urllib2.urlopen(url, timeout=5)
+            else:
+                urllib2.urlopen(url)
+        except urllib2.URLError:
+            # ignore any errors; all that's important is the request
+            pass
+        self.thread.join()
+
+    def base_url(self):
+        port = self.server_port
+        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
+
+class RequestRecorder(BaseHTTPServer.BaseHTTPRequestHandler):
+    def do_GET(self):
+        requests = vars(self.server).setdefault('requests', [])
+        requests.append(self)
+        self.send_response(200, 'OK')
+
+class MockServer(HTTPServer, threading.Thread):
+    """
+    A simple HTTP Server that records the requests made to it.
+    """
+    def __init__(self, server_address=('', 0),
+            RequestHandlerClass=RequestRecorder):
+        HTTPServer.__init__(self, server_address, RequestHandlerClass)
+        threading.Thread.__init__(self)
+        self.setDaemon(True)
+        self.requests = []
+
+    def run(self):
+        self.serve_forever()
+
+    def url(self):
+        return 'http://localhost:%(server_port)s/' % vars(self)
+    url = property(url)
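+
+# A minimal usage sketch for MockServer (the surrounding test code is implied):
+def _example_mock_server():
+    server = MockServer()
+    server.start()      # serve_forever() runs in a daemon thread
+    # ... exercise code that issues GET requests against server.url ...
+    return [r.path for r in server.requests]   # each handled GET is recorded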
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_bdist_egg.py b/vendor/distribute-0.6.35/setuptools/tests/test_bdist_egg.py
new file mode 100644
index 0000000000000000000000000000000000000000..7da122cc31b7a66898c5b98e79425fb6103e3b8c
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_bdist_egg.py
@@ -0,0 +1,69 @@
+"""develop tests
+"""
+import sys
+import os, re, shutil, tempfile, unittest
+import site
+from StringIO import StringIO
+
+from distutils.errors import DistutilsError
+from setuptools.command.bdist_egg import bdist_egg
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo', py_modules=['hi'])
+"""
+
+class TestDevelopTest(unittest.TestCase):
+
+    def setUp(self):
+        self.dir = tempfile.mkdtemp()
+        self.old_cwd = os.getcwd()
+        os.chdir(self.dir)
+        f = open('setup.py', 'w')
+        f.write(SETUP_PY)
+        f.close()
+        f = open('hi.py', 'w')
+        f.write('1\n')
+        f.close()
+        if sys.version >= "2.6":
+            self.old_base = site.USER_BASE
+            site.USER_BASE = tempfile.mkdtemp()
+            self.old_site = site.USER_SITE
+            site.USER_SITE = tempfile.mkdtemp()
+
+    def tearDown(self):
+        os.chdir(self.old_cwd)
+        shutil.rmtree(self.dir)
+        if sys.version >= "2.6":
+            shutil.rmtree(site.USER_BASE)
+            shutil.rmtree(site.USER_SITE)
+            site.USER_BASE = self.old_base
+            site.USER_SITE = self.old_site
+
+    def test_bdist_egg(self):
+        dist = Distribution(dict(
+            script_name='setup.py',
+            script_args=['bdist_egg'],
+            name='foo',
+            py_modules=['hi']
+            ))
+        os.makedirs(os.path.join('build', 'src'))
+        old_stdout = sys.stdout
+        sys.stdout = o = StringIO()
+        try:
+            dist.parse_command_line()
+            dist.run_commands()
+        finally:
+            sys.stdout = old_stdout
+
+        # let's see if we got our egg link at the right place
+        [content] = os.listdir('dist')
+        self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content))
+
+def test_suite():
+    return unittest.makeSuite(TestDevelopTest)
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_build_ext.py b/vendor/distribute-0.6.35/setuptools/tests/test_build_ext.py
new file mode 100644
index 0000000000000000000000000000000000000000..a520ced9d6a32cc1b53e6fb26b1f223a2a71798b
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_build_ext.py
@@ -0,0 +1,20 @@
+"""build_ext tests
+"""
+import os, shutil, tempfile, unittest
+from distutils.command.build_ext import build_ext as distutils_build_ext
+from setuptools.command.build_ext import build_ext
+from setuptools.dist import Distribution
+
+class TestBuildExtTest(unittest.TestCase):
+
+    def test_get_ext_filename(self):
+        # setuptools needs to give back the same
+        # result as distutils, even if the fullname
+        # is not in ext_map
+        dist = Distribution()
+        cmd = build_ext(dist)
+        cmd.ext_map['foo/bar'] = ''
+        res = cmd.get_ext_filename('foo')
+        wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
+        assert res == wanted
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_develop.py b/vendor/distribute-0.6.35/setuptools/tests/test_develop.py
new file mode 100644
index 0000000000000000000000000000000000000000..315058c57523b247f4c707d3c0a5a0797a3d5b84
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_develop.py
@@ -0,0 +1,118 @@
+"""develop tests
+"""
+import sys
+import os, shutil, tempfile, unittest
+import site
+from StringIO import StringIO
+
+from distutils.errors import DistutilsError
+from setuptools.command.develop import develop
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo',
+    packages=['foo'],
+    use_2to3=True,
+)
+"""
+
+INIT_PY = """print "foo"
+"""
+
+class TestDevelopTest(unittest.TestCase):
+
+    def setUp(self):
+        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+            return
+
+        # Directory structure
+        self.dir = tempfile.mkdtemp()
+        os.mkdir(os.path.join(self.dir, 'foo'))
+        # setup.py
+        setup = os.path.join(self.dir, 'setup.py')
+        f = open(setup, 'w')
+        f.write(SETUP_PY)
+        f.close()
+        self.old_cwd = os.getcwd()
+        # foo/__init__.py
+        init = os.path.join(self.dir, 'foo', '__init__.py')
+        f = open(init, 'w')
+        f.write(INIT_PY)
+        f.close()
+        
+        os.chdir(self.dir)
+        self.old_base = site.USER_BASE
+        site.USER_BASE = tempfile.mkdtemp()
+        self.old_site = site.USER_SITE
+        site.USER_SITE = tempfile.mkdtemp()
+
+    def tearDown(self):
+        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+            return
+        
+        os.chdir(self.old_cwd)
+        shutil.rmtree(self.dir)
+        shutil.rmtree(site.USER_BASE)
+        shutil.rmtree(site.USER_SITE)
+        site.USER_BASE = self.old_base
+        site.USER_SITE = self.old_site
+
+    def test_develop(self):
+        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+            return
+        dist = Distribution(
+            dict(name='foo',
+                 packages=['foo'],
+                 use_2to3=True,
+                 version='0.0',
+                 ))
+        dist.script_name = 'setup.py'
+        cmd = develop(dist)
+        cmd.user = 1
+        cmd.ensure_finalized()
+        cmd.install_dir = site.USER_SITE
+        cmd.user = 1
+        old_stdout = sys.stdout
+        #sys.stdout = StringIO()
+        try:
+            cmd.run()
+        finally:
+            sys.stdout = old_stdout
+
+        # let's see if we got our egg link at the right place
+        content = os.listdir(site.USER_SITE)
+        content.sort()
+        self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
+
+        # Check that we are using the right code.
+        egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
+        path = egg_link_file.read().split()[0].strip()
+        egg_link_file.close()
+        init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt')
+        init = init_file.read().strip()
+        init_file.close()
+        if sys.version < "3":
+            self.assertEqual(init, 'print "foo"')
+        else:
+            self.assertEqual(init, 'print("foo")')
+
+    def notest_develop_with_setup_requires(self):
+
+        wanted = ("Could not find suitable distribution for "
+                  "Requirement.parse('I-DONT-EXIST')")
+        old_dir = os.getcwd()
+        os.chdir(self.dir)
+        try:
+            try:
+                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
+            except DistutilsError, e:
+                error = str(e)
+                if error ==  wanted:
+                    pass
+        finally:
+            os.chdir(old_dir)
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_dist_info.py b/vendor/distribute-0.6.35/setuptools/tests/test_dist_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcb78c36d97de37143b85ceabdba9e9054c756e5
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_dist_info.py
@@ -0,0 +1,80 @@
+"""Test .dist-info style distributions.
+"""
+import os
+import shutil
+import tempfile
+import unittest
+import textwrap
+
+try:
+    import ast
+except ImportError:
+    pass
+
+import pkg_resources
+
+from setuptools.tests.py26compat import skipIf
+
+def DALS(s):
+    "dedent and left-strip"
+    return textwrap.dedent(s).lstrip()
+
+class TestDistInfo(unittest.TestCase):
+
+    def test_distinfo(self):
+        dists = {}
+        for d in pkg_resources.find_distributions(self.tmpdir):
+            dists[d.project_name] = d
+
+        assert len(dists) == 2, dists
+
+        unversioned = dists['UnversionedDistribution']
+        versioned = dists['VersionedDistribution']
+
+        assert versioned.version == '2.718' # from filename
+        assert unversioned.version == '0.3' # from METADATA
+
+    @skipIf('ast' not in globals(),
+        "ast is used to test conditional dependencies (Python >= 2.6)")
+    def test_conditional_dependencies(self):
+        requires = [pkg_resources.Requirement.parse('splort==4'),
+                    pkg_resources.Requirement.parse('quux>=1.1')]
+
+        for d in pkg_resources.find_distributions(self.tmpdir):
+            self.assertEqual(d.requires(), requires[:1])
+            self.assertEqual(d.requires(extras=('baz',)), requires)
+            self.assertEqual(d.extras, ['baz'])
+
+    def setUp(self):
+        self.tmpdir = tempfile.mkdtemp()
+        versioned = os.path.join(self.tmpdir,
+                                 'VersionedDistribution-2.718.dist-info')
+        os.mkdir(versioned)
+        metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+')
+        metadata_file.write(DALS(
+            """
+            Metadata-Version: 1.2
+            Name: VersionedDistribution
+            Requires-Dist: splort (4)
+            Provides-Extra: baz
+            Requires-Dist: quux (>=1.1); extra == 'baz'
+            """))
+        metadata_file.close()
+
+        unversioned = os.path.join(self.tmpdir,
+                                   'UnversionedDistribution.dist-info')
+        os.mkdir(unversioned)
+        metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+')
+        metadata_file.write(DALS(
+            """
+            Metadata-Version: 1.2
+            Name: UnversionedDistribution
+            Version: 0.3
+            Requires-Dist: splort (==4)
+            Provides-Extra: baz
+            Requires-Dist: quux (>=1.1); extra == 'baz'
+            """))
+        metadata_file.close()
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_easy_install.py b/vendor/distribute-0.6.35/setuptools/tests/test_easy_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..582219cef989d7e8b1bfc1d514c1e02482bfc5b5
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_easy_install.py
@@ -0,0 +1,460 @@
+"""Easy install Tests
+"""
+import sys
+import os
+import shutil
+import tempfile
+import unittest
+import site
+import textwrap
+import tarfile
+import urlparse
+import StringIO
+import distutils.core
+
+from setuptools.sandbox import run_setup, SandboxViolation
+from setuptools.command.easy_install import easy_install, fix_jython_executable, get_script_args, main
+from setuptools.command.easy_install import  PthDistributions
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+from pkg_resources import Distribution as PRDistribution
+import setuptools.tests.server
+
+try:
+    # import multiprocessing solely for the purpose of testing its existence
+    __import__('multiprocessing')
+    import logging
+    _LOG = logging.getLogger('test_easy_install')
+    logging.basicConfig(level=logging.INFO, stream=sys.stderr)
+    _MULTIPROC = True
+except ImportError:
+    _MULTIPROC = False
+    _LOG = None
+
+class FakeDist(object):
+    def get_entry_map(self, group):
+        if group != 'console_scripts':
+            return {}
+        return {'name': 'ep'}
+
+    def as_requirement(self):
+        return 'spec'
+
+WANTED = """\
+#!%s
+# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
+__requires__ = 'spec'
+import sys
+from pkg_resources import load_entry_point
+
+if __name__ == '__main__':
+    sys.exit(
+        load_entry_point('spec', 'console_scripts', 'name')()
+    )
+""" % fix_jython_executable(sys.executable, "")
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo')
+"""
+
+class TestEasyInstallTest(unittest.TestCase):
+
+    def test_install_site_py(self):
+        dist = Distribution()
+        cmd = easy_install(dist)
+        cmd.sitepy_installed = False
+        cmd.install_dir = tempfile.mkdtemp()
+        try:
+            cmd.install_site_py()
+            sitepy = os.path.join(cmd.install_dir, 'site.py')
+            self.assertTrue(os.path.exists(sitepy))
+        finally:
+            shutil.rmtree(cmd.install_dir)
+
+    def test_get_script_args(self):
+        dist = FakeDist()
+
+        old_platform = sys.platform
+        try:
+            name, script = [i for i in get_script_args(dist).next()][0:2]
+        finally:
+            sys.platform = old_platform
+
+        self.assertEqual(script, WANTED)
+
+    def test_no_setup_cfg(self):
+        # makes sure easy_install as a command (main)
+        # doesn't use a setup.cfg file that is located
+        # in the current working directory
+        dir = tempfile.mkdtemp()
+        setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
+        setup_cfg.write('[easy_install]\nfind_links = http://example.com')
+        setup_cfg.close()
+        setup_py = open(os.path.join(dir, 'setup.py'), 'w')
+        setup_py.write(SETUP_PY)
+        setup_py.close()
+
+        from setuptools.dist import Distribution
+
+        def _parse_command_line(self):
+            msg = 'Error: a local setup.cfg was used'
+            opts = self.command_options
+            if 'easy_install' in opts:
+                assert 'find_links' not in opts['easy_install'], msg
+            return self._old_parse_command_line()
+
+        Distribution._old_parse_command_line = Distribution.parse_command_line
+        Distribution.parse_command_line = _parse_command_line
+
+        old_wd = os.getcwd()
+        try:
+            os.chdir(dir)
+            reset_setup_stop_context(
+                lambda: self.assertRaises(SystemExit, main, [])
+            )
+        finally:
+            os.chdir(old_wd)
+            shutil.rmtree(dir)
+            Distribution.parse_command_line = Distribution._old_parse_command_line
+
+    def test_no_find_links(self):
+        # new option '--no-find-links', that blocks find-links added at
+        # the project level
+        dist = Distribution()
+        cmd = easy_install(dist)
+        cmd.check_pth_processing = lambda: True
+        cmd.no_find_links = True
+        cmd.find_links = ['link1', 'link2']
+        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
+        cmd.args = ['ok']
+        cmd.ensure_finalized()
+        self.assertEqual(cmd.package_index.scanned_urls, {})
+
+        # let's try without it (default behavior)
+        cmd = easy_install(dist)
+        cmd.check_pth_processing = lambda: True
+        cmd.find_links = ['link1', 'link2']
+        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
+        cmd.args = ['ok']
+        cmd.ensure_finalized()
+        keys = cmd.package_index.scanned_urls.keys()
+        keys.sort()
+        self.assertEqual(keys, ['link1', 'link2'])
+
+
+class TestPTHFileWriter(unittest.TestCase):
+    def test_add_from_cwd_site_sets_dirty(self):
+        '''a pth file manager should be marked dirty
+        when a distribution located in the cwd is added, even though the
+        cwd is also a site dir
+        '''
+        pth = PthDistributions('does-not_exist', [os.getcwd()])
+        self.assertTrue(not pth.dirty)
+        pth.add(PRDistribution(os.getcwd()))
+        self.assertTrue(pth.dirty)
+
+    def test_add_from_site_is_ignored(self):
+        if os.name != 'nt':
+            location = '/test/location/does-not-have-to-exist'
+        else:
+            location = 'c:\\does_not_exist'
+        pth = PthDistributions('does-not_exist', [location, ])
+        self.assertTrue(not pth.dirty)
+        pth.add(PRDistribution(location))
+        self.assertTrue(not pth.dirty)
+
+
+class TestUserInstallTest(unittest.TestCase):
+
+    def setUp(self):
+        self.dir = tempfile.mkdtemp()
+        setup = os.path.join(self.dir, 'setup.py')
+        f = open(setup, 'w')
+        f.write(SETUP_PY)
+        f.close()
+        self.old_cwd = os.getcwd()
+        os.chdir(self.dir)
+        if sys.version >= "2.6":
+            self.old_has_site = easy_install_pkg.HAS_USER_SITE
+            self.old_file = easy_install_pkg.__file__
+            self.old_base = site.USER_BASE
+            site.USER_BASE = tempfile.mkdtemp()
+            self.old_site = site.USER_SITE
+            site.USER_SITE = tempfile.mkdtemp()
+            easy_install_pkg.__file__ = site.USER_SITE
+
+    def tearDown(self):
+        os.chdir(self.old_cwd)
+        shutil.rmtree(self.dir)
+        if sys.version >= "2.6":
+            shutil.rmtree(site.USER_BASE)
+            shutil.rmtree(site.USER_SITE)
+            site.USER_BASE = self.old_base
+            site.USER_SITE = self.old_site
+            easy_install_pkg.HAS_USER_SITE = self.old_has_site
+            easy_install_pkg.__file__ = self.old_file
+
+    def test_user_install_implied(self):
+        easy_install_pkg.HAS_USER_SITE = True # force on; disabled on some setups
+        #XXX: replace with something meaningful
+        if sys.version < "2.6":
+            return #SKIP
+        dist = Distribution()
+        dist.script_name = 'setup.py'
+        cmd = easy_install(dist)
+        cmd.args = ['py']
+        cmd.ensure_finalized()
+        self.assertTrue(cmd.user, 'user should be implied')
+
+    def test_multiproc_atexit(self):
+        if not _MULTIPROC:
+            return
+        _LOG.info('this should not break')
+
+    def test_user_install_not_implied_without_usersite_enabled(self):
+        easy_install_pkg.HAS_USER_SITE = False # usually enabled
+        #XXX: replace with something meaningful
+        if sys.version < "2.6":
+            return #SKIP
+        dist = Distribution()
+        dist.script_name = 'setup.py'
+        cmd = easy_install(dist)
+        cmd.args = ['py']
+        cmd.initialize_options()
+        self.assertFalse(cmd.user, 'NOT user should be implied')
+
+    def test_local_index(self):
+        # make sure the local index is used
+        # when easy_install looks for installed
+        # packages
+        new_location = tempfile.mkdtemp()
+        target = tempfile.mkdtemp()
+        egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
+        f = open(egg_file, 'w')
+        try:
+            f.write('Name: foo\n')
+        finally:
+            f.close()
+
+        sys.path.append(target)
+        old_ppath = os.environ.get('PYTHONPATH')
+        os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path)
+        try:
+            dist = Distribution()
+            dist.script_name = 'setup.py'
+            cmd = easy_install(dist)
+            cmd.install_dir = target
+            cmd.args = ['foo']
+            cmd.ensure_finalized()
+            cmd.local_index.scan([new_location])
+            res = cmd.easy_install('foo')
+            self.assertEqual(os.path.realpath(res.location),
+                             os.path.realpath(new_location))
+        finally:
+            sys.path.remove(target)
+            for basedir in [new_location, target, ]:
+                if not os.path.exists(basedir) or not os.path.isdir(basedir):
+                    continue
+                try:
+                    shutil.rmtree(basedir)
+                except:
+                    pass
+            if old_ppath is not None:
+                os.environ['PYTHONPATH'] = old_ppath
+            else:
+                del os.environ['PYTHONPATH']
+
+    def test_setup_requires(self):
+        """Regression test for issue #318
+
+        Ensures that a package with setup_requires can be installed when
+        distribute is installed in the user site-packages without causing a
+        SandboxViolation.
+        """
+
+        test_setup_attrs = {
+            'name': 'test_pkg', 'version': '0.0',
+            'setup_requires': ['foobar'],
+            'dependency_links': [os.path.abspath(self.dir)]
+        }
+
+        test_pkg = os.path.join(self.dir, 'test_pkg')
+        test_setup_py = os.path.join(test_pkg, 'setup.py')
+        test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
+        os.mkdir(test_pkg)
+
+        f = open(test_setup_py, 'w')
+        f.write(textwrap.dedent("""\
+            import setuptools
+            setuptools.setup(**%r)
+        """ % test_setup_attrs))
+        f.close()
+
+        foobar_path = os.path.join(self.dir, 'foobar-0.1.tar.gz')
+        make_trivial_sdist(
+            foobar_path,
+            textwrap.dedent("""\
+                import setuptools
+                setuptools.setup(
+                    name='foobar',
+                    version='0.1'
+                )
+            """))
+
+        old_stdout = sys.stdout
+        old_stderr = sys.stderr
+        sys.stdout = StringIO.StringIO()
+        sys.stderr = StringIO.StringIO()
+        try:
+            reset_setup_stop_context(
+                lambda: run_setup(test_setup_py, ['install'])
+            )
+        except SandboxViolation:
+            self.fail('Installation caused SandboxViolation')
+        finally:
+            sys.stdout = old_stdout
+            sys.stderr = old_stderr
+
+
+class TestSetupRequires(unittest.TestCase):
+
+    def test_setup_requires_honors_fetch_params(self):
+        """
+        When easy_install installs a source distribution which specifies
+        setup_requires, it should honor the fetch parameters (such as
+        allow-hosts, index-url, and find-links).
+        """
+        # set up a server which will simulate an alternate package index.
+        p_index = setuptools.tests.server.MockServer()
+        p_index.start()
+        netloc = 1
+        p_index_loc = urlparse.urlparse(p_index.url)[netloc]
+        if p_index_loc.endswith(':0'):
+            # Some platforms (Jython) don't find a port to which to bind,
+            #  so skip this test for them.
+            return
+
+        # I realize this is all-but-impossible to read, because it was
+        #  ported from some well-factored, safe code using 'with'. If you
+        #  need to maintain this code, consider making the changes in
+        #  the parent revision (of this comment) and then port the changes
+        #  back for Python 2.4 (or deprecate Python 2.4).
+
+        def install(dist_file):
+            def install_at(temp_install_dir):
+                def install_env():
+                    ei_params = ['--index-url', p_index.url,
+                        '--allow-hosts', p_index_loc,
+                        '--exclude-scripts', '--install-dir', temp_install_dir,
+                        dist_file]
+                    def install_clean_reset():
+                        def install_clean_argv():
+                            # attempt to install the dist. It should fail because
+                            #  it doesn't exist.
+                            self.assertRaises(SystemExit,
+                                easy_install_pkg.main, ei_params)
+                        argv_context(install_clean_argv, ['easy_install'])
+                    reset_setup_stop_context(install_clean_reset)
+                environment_context(install_env, PYTHONPATH=temp_install_dir)
+            tempdir_context(install_at)
+
+        # create an sdist that has a build-time dependency.
+        self.create_sdist(install)
+
+        # there should have been two or three requests to the server
+        #  (three happens on Python 3.3a)
+        self.assertTrue(2 <= len(p_index.requests) <= 3)
+        self.assertEqual(p_index.requests[0].path, '/does-not-exist/')
+
+    def create_sdist(self, installer):
+        """
+        Create an sdist with a setup_requires dependency (of something that
+        doesn't exist) and invoke installer on it.
+        """
+        def build_sdist(dir):
+            dist_path = os.path.join(dir, 'distribute-test-fetcher-1.0.tar.gz')
+            make_trivial_sdist(
+                dist_path,
+                textwrap.dedent("""
+                    import setuptools
+                    setuptools.setup(
+                        name="distribute-test-fetcher",
+                        version="1.0",
+                        setup_requires = ['does-not-exist'],
+                    )
+                """).lstrip())
+            installer(dist_path)
+        tempdir_context(build_sdist)
+
+
+def make_trivial_sdist(dist_path, setup_py):
+    """Create a simple sdist tarball at dist_path, containing just a
+    setup.py, the contents of which are provided by the setup_py string.
+    """
+
+    setup_py_file = tarfile.TarInfo(name='setup.py')
+    try:
+        # Python 3 (StringIO gets converted to io module)
+        MemFile = StringIO.BytesIO
+    except AttributeError:
+        MemFile = StringIO.StringIO
+    setup_py_bytes = MemFile(setup_py.encode('utf-8'))
+    setup_py_file.size = len(setup_py_bytes.getvalue())
+    dist = tarfile.open(dist_path, 'w:gz')
+    try:
+        dist.addfile(setup_py_file, fileobj=setup_py_bytes)
+    finally:
+        dist.close()
+
+
+def tempdir_context(f, cd=lambda dir:None):
+    """
+    Invoke f in the context
+    """
+    temp_dir = tempfile.mkdtemp()
+    orig_dir = os.getcwd()
+    try:
+        cd(temp_dir)
+        f(temp_dir)
+    finally:
+        cd(orig_dir)
+        shutil.rmtree(temp_dir)
+
+def environment_context(f, **updates):
+    """
+    Invoke f in the context
+    """
+    old_env = os.environ.copy()
+    os.environ.update(updates)
+    try:
+        f()
+    finally:
+        for key in updates:
+            del os.environ[key]
+        os.environ.update(old_env)
+
+def argv_context(f, repl):
+    """
+    Invoke f in the context
+    """
+    old_argv = sys.argv[:]
+    sys.argv[:] = repl
+    try:
+        f()
+    finally:
+        sys.argv[:] = old_argv
+
+def reset_setup_stop_context(f):
+    """
+    When the distribute tests are run using setup.py test, and then
+    one wants to invoke another setup() command (such as easy_install)
+    within those tests, it's necessary to reset the global variable
+    in distutils.core so that the setup() command will run naturally.
+    """
+    setup_stop_after = distutils.core._setup_stop_after
+    distutils.core._setup_stop_after = None
+    try:
+        f()
+    finally:
+        distutils.core._setup_stop_after = setup_stop_after
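+
+# A minimal sketch of how the context helpers above compose (the inner
+# callbacks and values are illustrative only):
+def _example_context_helpers():
+    def in_tempdir(temp_dir):
+        def with_env():
+            def with_argv():
+                pass    # code needing the temp dir, env var and argv in place
+            argv_context(with_argv, ['easy_install', '--version'])
+        environment_context(with_env, PYTHONPATH=temp_dir)
+    tempdir_context(in_tempdir)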
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_markerlib.py b/vendor/distribute-0.6.35/setuptools/tests/test_markerlib.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa461846b641d825258bea056ecf990eeec985c0
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_markerlib.py
@@ -0,0 +1,64 @@
+import os
+import unittest
+from setuptools.tests.py26compat import skipIf
+
+try:
+    import ast
+except ImportError:
+    pass
+
+class TestMarkerlib(unittest.TestCase):
+
+    @skipIf('ast' not in globals(),
+        "ast not available (Python < 2.6?)")
+    def test_markers(self):
+        from _markerlib import interpret, default_environment, compile
+        
+        os_name = os.name
+        
+        self.assertTrue(interpret(""))
+        
+        self.assertTrue(interpret("os.name != 'buuuu'"))
+        self.assertTrue(interpret("python_version > '1.0'"))
+        self.assertTrue(interpret("python_version < '5.0'"))
+        self.assertTrue(interpret("python_version <= '5.0'"))
+        self.assertTrue(interpret("python_version >= '1.0'"))
+        self.assertTrue(interpret("'%s' in os.name" % os_name))
+        self.assertTrue(interpret("'buuuu' not in os.name"))
+        
+        self.assertFalse(interpret("os.name == 'buuuu'"))
+        self.assertFalse(interpret("python_version < '1.0'"))
+        self.assertFalse(interpret("python_version > '5.0'"))
+        self.assertFalse(interpret("python_version >= '5.0'"))
+        self.assertFalse(interpret("python_version <= '1.0'"))
+        self.assertFalse(interpret("'%s' not in os.name" % os_name))
+        self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'"))    
+        
+        environment = default_environment()
+        environment['extra'] = 'test'
+        self.assertTrue(interpret("extra == 'test'", environment))
+        self.assertFalse(interpret("extra == 'doc'", environment))
+        
+        def raises_nameError():
+            try:
+                interpret("python.version == '42'")
+            except NameError:
+                pass
+            else:
+                raise Exception("Expected NameError")
+        
+        raises_nameError()
+        
+        def raises_syntaxError():
+            try:
+                interpret("(x for x in (4,))")
+            except SyntaxError:
+                pass
+            else:
+                raise Exception("Expected SyntaxError")
+            
+        raises_syntaxError()
+        
+        statement = "python_version == '5'"
+        self.assertEqual(compile(statement).__doc__, statement)
+        
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_packageindex.py b/vendor/distribute-0.6.35/setuptools/tests/test_packageindex.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e446b54d4aabb325648ee7a5eebd6b3e22e6701
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_packageindex.py
@@ -0,0 +1,145 @@
+"""Package Index Tests
+"""
+import sys
+import unittest
+import urllib2
+import pkg_resources
+import httplib
+import distutils.errors
+import setuptools.package_index
+from server import IndexServer
+
+class TestPackageIndex(unittest.TestCase):
+
+    def test_bad_url_bad_port(self):
+        index = setuptools.package_index.PackageIndex()
+        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
+        try:
+            v = index.open_url(url)
+        except Exception, v:
+            self.assertTrue(url in str(v))
+        else:
+            self.assertTrue(isinstance(v,urllib2.HTTPError))
+
+    def test_bad_url_typo(self):
+        # issue 16
+        # easy_install inquant.contentmirror.plone breaks because of a typo
+        # in its home URL
+        index = setuptools.package_index.PackageIndex(
+            hosts=('www.example.com',)
+        )
+
+        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
+        try:
+            v = index.open_url(url)
+        except Exception, v:
+            self.assertTrue(url in str(v))
+        else:
+            self.assertTrue(isinstance(v, urllib2.HTTPError))
+
+    def test_bad_url_bad_status_line(self):
+        index = setuptools.package_index.PackageIndex(
+            hosts=('www.example.com',)
+        )
+
+        def _urlopen(*args):
+            import httplib
+            raise httplib.BadStatusLine('line')
+
+        old_urlopen = urllib2.urlopen
+        urllib2.urlopen = _urlopen
+        url = 'http://example.com'
+        try:
+            try:
+                v = index.open_url(url)
+            except Exception, v:
+                self.assertTrue('line' in str(v))
+            else:
+                raise AssertionError('Should have raised here!')
+        finally:
+            urllib2.urlopen = old_urlopen
+
+    def test_bad_url_double_scheme(self):
+        """
+        A bad URL with a double scheme should raise a DistutilsError.
+        """
+        index = setuptools.package_index.PackageIndex(
+            hosts=('www.example.com',)
+        )
+
+        # issue 20
+        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
+        try:
+            index.open_url(url)
+        except distutils.errors.DistutilsError, error:
+            msg = unicode(error)
+            assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
+            return
+        raise RuntimeError("Did not raise")
+
+    def test_bad_url_screwy_href(self):
+        index = setuptools.package_index.PackageIndex(
+            hosts=('www.example.com',)
+        )
+
+        # issue #160
+        if sys.version_info[0] == 2 and sys.version_info[1] == 7:
+            # this should not fail
+            url = 'http://example.com'
+            page = ('<a href="http://www.famfamfam.com]('
+                    'http://www.famfamfam.com/">')
+            index.process_index(url, page)
+
+    def test_url_ok(self):
+        index = setuptools.package_index.PackageIndex(
+            hosts=('www.example.com',)
+        )
+        url = 'file:///tmp/test_package_index'
+        self.assertTrue(index.url_ok(url, True))
+
+    def test_links_priority(self):
+        """
+        Download links from the pypi simple index should be used before
+        external download links.
+        http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
+
+        Use case:
+        - someone uploads a package on pypi, an md5 is generated
+        - someone manually copies this link (with the md5 in the url) onto an
+          external page accessible from the package page.
+        - someone reuploads the package (with a different md5)
+        - while easy_installing, an MD5 error occurs because the external link
+          is used
+        -> Distribute should use the link from pypi, not the external one.
+        """
+        if sys.platform.startswith('java'):
+            # Skip this test on jython because binding to :0 fails
+            return
+
+        # start an index server
+        server = IndexServer()
+        server.start()
+        index_url = server.base_url() + 'test_links_priority/simple/'
+
+        # scan a test index
+        pi = setuptools.package_index.PackageIndex(index_url)
+        requirement = pkg_resources.Requirement.parse('foobar')
+        pi.find_packages(requirement)
+        server.stop()
+
+        # the distribution has been found
+        self.assertTrue('foobar' in pi)
+        # we have only one link, because links are compared without md5
+        self.assertTrue(len(pi['foobar'])==1)
+        # the link should be from the index
+        self.assertTrue('correct_md5' in pi['foobar'][0].location)
+
+    def test_parse_bdist_wininst(self):
+        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
+            'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
+        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
+            'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
+        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
+            'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
+        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
+            'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_resources.py b/vendor/distribute-0.6.35/setuptools/tests/test_resources.py
new file mode 100644
index 0000000000000000000000000000000000000000..292b78d1ab699b8a6e4ee8ad766c91f016a29b92
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_resources.py
@@ -0,0 +1,645 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
+from unittest import TestCase, makeSuite; from pkg_resources import *
+from setuptools.command.easy_install import get_script_header, is_sh
+import os, pkg_resources, sys, StringIO, tempfile, shutil
+try: frozenset
+except NameError:
+    from sets import ImmutableSet as frozenset
+
+_MAX_LENGTH = 80  # as in Python 2.7's unittest.util; needed by safe_repr below
+
+def safe_repr(obj, short=False):
+    """ copied from Python 2.7"""
+    try:
+        result = repr(obj)
+    except Exception:
+        result = object.__repr__(obj)
+    if not short or len(result) < _MAX_LENGTH:
+        return result
+    return result[:_MAX_LENGTH] + ' [truncated]...'
+
+class Metadata(EmptyProvider):
+    """Mock object to return metadata as if from an on-disk distribution"""
+
+    def __init__(self,*pairs):
+        self.metadata = dict(pairs)
+
+    def has_metadata(self,name):
+        return name in self.metadata
+
+    def get_metadata(self,name):
+        return self.metadata[name]
+
+    def get_metadata_lines(self,name):
+        return yield_lines(self.get_metadata(name))
+
+class DistroTests(TestCase):
+
+    def testCollection(self):
+        # empty path should produce no distributions
+        ad = Environment([], platform=None, python=None)
+        self.assertEqual(list(ad), [])
+        self.assertEqual(ad['FooPkg'],[])
+        ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
+        ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
+        ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
+
+        # Name is in there now
+        self.assertTrue(ad['FooPkg'])
+        # But only 1 package
+        self.assertEqual(list(ad), ['foopkg'])
+
+        # Distributions sort by version
+        self.assertEqual(
+            [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
+        )
+        # Removing a distribution leaves sequence alone
+        ad.remove(ad['FooPkg'][1])
+        self.assertEqual(
+            [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
+        )
+        # And inserting adds them in order
+        ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
+        self.assertEqual(
+            [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
+        )
+
+        ws = WorkingSet([])
+        foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
+        foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
+        req, = parse_requirements("FooPkg>=1.3")
+
+        # Nominal case: no distros on path, should yield all applicable
+        self.assertEqual(ad.best_match(req,ws).version, '1.9')
+        # If a matching distro is already installed, should return only that
+        ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
+
+        # If the first matching distro is unsuitable, it's a version conflict
+        ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
+        self.assertRaises(VersionConflict, ad.best_match, req, ws)
+
+        # If more than one match on the path, the first one takes precedence
+        ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
+        self.assertEqual(ad.best_match(req,ws).version, '1.4')
+
+    def checkFooPkg(self,d):
+        self.assertEqual(d.project_name, "FooPkg")
+        self.assertEqual(d.key, "foopkg")
+        self.assertEqual(d.version, "1.3-1")
+        self.assertEqual(d.py_version, "2.4")
+        self.assertEqual(d.platform, "win32")
+        self.assertEqual(d.parsed_version, parse_version("1.3-1"))
+
+    def testDistroBasics(self):
+        d = Distribution(
+            "/some/path",
+            project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
+        )
+        self.checkFooPkg(d)
+
+        d = Distribution("/some/path")
+        self.assertEqual(d.py_version, sys.version[:3])
+        self.assertEqual(d.platform, None)
+
+    def testDistroParse(self):
+        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
+        self.checkFooPkg(d)
+        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
+        self.checkFooPkg(d)
+
+    def testDistroMetadata(self):
+        d = Distribution(
+            "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
+            metadata = Metadata(
+                ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
+            )
+        )
+        self.checkFooPkg(d)
+
+
+    def distRequires(self, txt):
+        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
+
+    def checkRequires(self, dist, txt, extras=()):
+        self.assertEqual(
+            list(dist.requires(extras)),
+            list(parse_requirements(txt))
+        )
+
+    def testDistroDependsSimple(self):
+        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
+            self.checkRequires(self.distRequires(v), v)
+
+
+    def testResolve(self):
+        ad = Environment([]); ws = WorkingSet([])
+        # Resolving no requirements -> nothing to install
+        self.assertEqual( list(ws.resolve([],ad)), [] )
+        # Request something not in the collection -> DistributionNotFound
+        self.assertRaises(
+            DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
+        )
+        Foo = Distribution.from_filename(
+            "/foo_dir/Foo-1.2.egg",
+            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
+        )
+        ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
+
+        # Request thing(s) that are available -> list to activate
+        for i in range(3):
+            targets = list(ws.resolve(parse_requirements("Foo"), ad))
+            self.assertEqual(targets, [Foo])
+            map(ws.add,targets)
+        self.assertRaises(VersionConflict, ws.resolve,
+            parse_requirements("Foo==0.9"), ad)
+        ws = WorkingSet([]) # reset
+
+        # Request an extra that causes an unresolved dependency for "Baz"
+        self.assertRaises(
+            DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
+        )
+        Baz = Distribution.from_filename(
+            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
+        )
+        ad.add(Baz)
+
+        # Activation list now includes resolved dependency
+        self.assertEqual(
+            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
+        )
+        # Requests for conflicting versions produce VersionConflict
+        self.assertRaises( VersionConflict,
+            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
+        )
+
+    def testDistroDependsOptions(self):
+        d = self.distRequires("""
+            Twisted>=1.5
+            [docgen]
+            ZConfig>=2.0
+            docutils>=0.3
+            [fastcgi]
+            fcgiapp>=0.1""")
+        self.checkRequires(d,"Twisted>=1.5")
+        self.checkRequires(
+            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
+            ["docgen","fastcgi"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
+            ["fastcgi", "docgen"]
+        )
+        self.assertRaises(UnknownExtra, d.requires, ["foo"])
+
+    def testSetuptoolsDistributeCombination(self):
+        # Ensure that installing a 0.7-series setuptools fails.  PJE says that
+        # it will not co-exist.
+        ws = WorkingSet([])
+        d = Distribution(
+            "/some/path",
+            project_name="setuptools",
+            version="0.7a1")
+        self.assertRaises(ValueError, ws.add, d)
+        # A 0.6-series is no problem
+        d2 = Distribution(
+            "/some/path",
+            project_name="setuptools",
+            version="0.6c9")
+        ws.add(d2)
+
+        # a nonexistent version needs to work
+        ws = WorkingSet([])
+        d3 = Distribution(
+            "/some/path",
+            project_name="setuptools")
+        ws.add(d3)
+
+
+class EntryPointTests(TestCase):
+
+    def assertfields(self, ep):
+        self.assertEqual(ep.name,"foo")
+        self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
+        self.assertEqual(ep.attrs, ("EntryPointTests",))
+        self.assertEqual(ep.extras, ("x",))
+        self.assertTrue(ep.load() is EntryPointTests)
+        self.assertEqual(
+            str(ep),
+            "foo = setuptools.tests.test_resources:EntryPointTests [x]"
+        )
+
+    def setUp(self):
+        self.dist = Distribution.from_filename(
+            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
+
+    def testBasics(self):
+        ep = EntryPoint(
+            "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
+            ["x"], self.dist
+        )
+        self.assertfields(ep)
+
+    def testParse(self):
+        s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
+        ep = EntryPoint.parse(s, self.dist)
+        self.assertfields(ep)
+
+        ep = EntryPoint.parse("bar baz=  spammity[PING]")
+        self.assertEqual(ep.name,"bar baz")
+        self.assertEqual(ep.module_name,"spammity")
+        self.assertEqual(ep.attrs, ())
+        self.assertEqual(ep.extras, ("ping",))
+
+        ep = EntryPoint.parse(" fizzly =  wocka:foo")
+        self.assertEqual(ep.name,"fizzly")
+        self.assertEqual(ep.module_name,"wocka")
+        self.assertEqual(ep.attrs, ("foo",))
+        self.assertEqual(ep.extras, ())
+
+    def testRejects(self):
+        for ep in [
+            "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
+        ]:
+            try: EntryPoint.parse(ep)
+            except ValueError: pass
+            else: raise AssertionError("Should've been bad", ep)
+
+    def checkSubMap(self, m):
+        self.assertEqual(len(m), len(self.submap_expect))
+        for key, ep in self.submap_expect.iteritems():
+            self.assertEqual(repr(m.get(key)), repr(ep))
+
+    submap_expect = dict(
+        feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
+        feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
+        feature3=EntryPoint('feature3', 'this.module', extras=['something'])
+    )
+    submap_str = """
+            # define features for blah blah
+            feature1 = somemodule:somefunction
+            feature2 = another.module:SomeClass [extra1,extra2]
+            feature3 = this.module [something]
+    """
+
+    def testParseList(self):
+        self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
+        self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
+        self.assertRaises(ValueError, EntryPoint.parse_group, "x",
+            ["foo=baz", "foo=bar"])
+
+    def testParseMap(self):
+        m = EntryPoint.parse_map({'xyz':self.submap_str})
+        self.checkSubMap(m['xyz'])
+        self.assertEqual(m.keys(),['xyz'])
+        m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
+        self.checkSubMap(m['xyz'])
+        self.assertEqual(m.keys(),['xyz'])
+        self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
+        self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
+
+class RequirementsTests(TestCase):
+
+    def testBasics(self):
+        r = Requirement.parse("Twisted>=1.2")
+        self.assertEqual(str(r),"Twisted>=1.2")
+        self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
+        self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
+        self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
+        self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
+        self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
+        self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
+        self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
+
+    def testOrdering(self):
+        r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
+        r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
+        self.assertEqual(r1,r2)
+        self.assertEqual(str(r1),str(r2))
+        self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
+
+    def testBasicContains(self):
+        r = Requirement("Twisted", [('>=','1.2')], ())
+        foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
+        twist11  = Distribution.from_filename("Twisted-1.1.egg")
+        twist12  = Distribution.from_filename("Twisted-1.2.egg")
+        self.assertTrue(parse_version('1.2') in r)
+        self.assertTrue(parse_version('1.1') not in r)
+        self.assertTrue('1.2' in r)
+        self.assertTrue('1.1' not in r)
+        self.assertTrue(foo_dist not in r)
+        self.assertTrue(twist11 not in r)
+        self.assertTrue(twist12 in r)
+
+    def testAdvancedContains(self):
+        r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
+        for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
+            self.assertTrue(v in r, (v,r))
+        for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
+            self.assertTrue(v not in r, (v,r))
+
+
+    def testOptionsAndHashing(self):
+        r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
+        r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
+        r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
+        self.assertEqual(r1,r2)
+        self.assertEqual(r1,r3)
+        self.assertEqual(r1.extras, ("foo","bar"))
+        self.assertEqual(r2.extras, ("bar","foo"))  # extras are normalized
+        self.assertEqual(hash(r1), hash(r2))
+        self.assertEqual(
+            hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
+                            frozenset(["foo","bar"])))
+        )
+
+    def testVersionEquality(self):
+        r1 = Requirement.parse("foo==0.3a2")
+        r2 = Requirement.parse("foo!=0.3a4")
+        d = Distribution.from_filename
+
+        self.assertTrue(d("foo-0.3a4.egg") not in r1)
+        self.assertTrue(d("foo-0.3a1.egg") not in r1)
+        self.assertTrue(d("foo-0.3a4.egg") not in r2)
+
+        self.assertTrue(d("foo-0.3a2.egg") in r1)
+        self.assertTrue(d("foo-0.3a2.egg") in r2)
+        self.assertTrue(d("foo-0.3a3.egg") in r2)
+        self.assertTrue(d("foo-0.3a5.egg") in r2)
+
+    def testDistributeSetuptoolsOverride(self):
+        # Plain setuptools or distribute mean we return distribute.
+        self.assertEqual(
+            Requirement.parse('setuptools').project_name, 'distribute')
+        self.assertEqual(
+            Requirement.parse('distribute').project_name, 'distribute')
+        # setuptools lower than 0.7 means distribute
+        self.assertEqual(
+            Requirement.parse('setuptools==0.6c9').project_name, 'distribute')
+        self.assertEqual(
+            Requirement.parse('setuptools==0.6c10').project_name, 'distribute')
+        self.assertEqual(
+            Requirement.parse('setuptools>=0.6').project_name, 'distribute')
+        self.assertEqual(
+            Requirement.parse('setuptools < 0.7').project_name, 'distribute')
+        # setuptools 0.7 and higher means setuptools.
+        self.assertEqual(
+            Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
+        self.assertEqual(
+            Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
+        self.assertEqual(
+            Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
+
+
+
+
+
+
+
+
+
+
+
+class ParseTests(TestCase):
+
+    def testEmptyParse(self):
+        self.assertEqual(list(parse_requirements('')), [])
+
+    def testYielding(self):
+        for inp,out in [
+            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
+            (['x\n\n','y'], ['x','y']),
+        ]:
+            self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
+
+    def testSplitting(self):
+        self.assertEqual(
+            list(
+                pkg_resources.split_sections("""
+                    x
+                    [Y]
+                    z
+
+                    a
+                    [b ]
+                    # foo
+                    c
+                    [ d]
+                    [q]
+                    v
+                    """
+                )
+            ),
+            [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
+        )
+        self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
+
+    def testSafeName(self):
+        self.assertEqual(safe_name("adns-python"), "adns-python")
+        self.assertEqual(safe_name("WSGI Utils"),  "WSGI-Utils")
+        self.assertEqual(safe_name("WSGI  Utils"), "WSGI-Utils")
+        self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
+        self.assertNotEqual(safe_name("peak.web"), "peak-web")
+
+    def testSafeVersion(self):
+        self.assertEqual(safe_version("1.2-1"), "1.2-1")
+        self.assertEqual(safe_version("1.2 alpha"),  "1.2.alpha")
+        self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
+        self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
+        self.assertEqual(safe_version("peak.web"), "peak.web")
+
+    def testSimpleRequirements(self):
+        self.assertEqual(
+            list(parse_requirements('Twis-Ted>=1.2-1')),
+            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
+        )
+        self.assertEqual(
+            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
+            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
+        )
+        self.assertEqual(
+            Requirement.parse("FooBar==1.99a3"),
+            Requirement("FooBar", [('==','1.99a3')], ())
+        )
+        self.assertRaises(ValueError,Requirement.parse,">=2.3")
+        self.assertRaises(ValueError,Requirement.parse,"x\\")
+        self.assertRaises(ValueError,Requirement.parse,"x==2 q")
+        self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
+        self.assertRaises(ValueError,Requirement.parse,"#")
+
+    def testVersionEquality(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            self.assertEqual(p1,p2, (s1,s2,p1,p2))
+
+        c('1.2-rc1', '1.2rc1')
+        c('0.4', '0.4.0')
+        c('0.4.0.0', '0.4.0')
+        c('0.4.0-0', '0.4-0')
+        c('0pl1', '0.0pl1')
+        c('0pre1', '0.0c1')
+        c('0.0.0preview1', '0c1')
+        c('0.0c1', '0-rc1')
+        c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
+
+    def testVersionOrdering(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            self.assertTrue(p1<p2, (s1,s2,p1,p2))
+
+        c('2.1','2.1.1')
+        c('2a1','2b0')
+        c('2a1','2.1')
+        c('2.3a1', '2.3')
+        c('2.1-1', '2.1-2')
+        c('2.1-1', '2.1.1')
+        c('2.1', '2.1pl4')
+        c('2.1a0-20040501', '2.1')
+        c('1.1', '02.1')
+        c('A56','B27')
+        c('3.2', '3.2.pl0')
+        c('3.2-1', '3.2pl1')
+        c('3.2pl1', '3.2pl1-1')
+        c('0.4', '4.0')
+        c('0.0.4', '0.4.0')
+        c('0pl1', '0.4pl1')
+        c('2.1.0-rc1','2.1.0')
+        c('2.1dev','2.1a0')
+
+        torture ="""
+        0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
+        0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
+        0.77.2-1 0.77.1-1 0.77.0-1
+        """.split()
+
+        for p,v1 in enumerate(torture):
+            for v2 in torture[p+1:]:
+                c(v2,v1)
+
+
+
+
+
+
+
+
+class ScriptHeaderTests(TestCase):
+    non_ascii_exe = '/Users/José/bin/python'
+
+    def test_get_script_header(self):
+        if not sys.platform.startswith('java') or not is_sh(sys.executable):
+            # This test is for non-Jython platforms
+            self.assertEqual(get_script_header('#!/usr/local/bin/python'),
+                             '#!%s\n' % os.path.normpath(sys.executable))
+            self.assertEqual(get_script_header('#!/usr/bin/python -x'),
+                             '#!%s  -x\n' % os.path.normpath(sys.executable))
+            self.assertEqual(get_script_header('#!/usr/bin/python',
+                                               executable=self.non_ascii_exe),
+                             '#!%s -x\n' % self.non_ascii_exe)
+
+    def test_get_script_header_jython_workaround(self):
+        # This test doesn't work with Python 3 in some locales
+        if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
+            in (None, "C", "POSIX")):
+            return
+
+        class java:
+            class lang:
+                class System:
+                    @staticmethod
+                    def getProperty(property):
+                        return ""
+        sys.modules["java"] = java
+
+        platform = sys.platform
+        sys.platform = 'java1.5.0_13'
+        stdout = sys.stdout
+        try:
+            # A mock sys.executable that uses a shebang line (this file)
+            exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
+            self.assertEqual(
+                get_script_header('#!/usr/local/bin/python', executable=exe),
+                '#!/usr/bin/env %s\n' % exe)
+
+            # Ensure we generate what is basically a broken shebang line
+            # when there's options, with a warning emitted
+            sys.stdout = sys.stderr = StringIO.StringIO()
+            self.assertEqual(get_script_header('#!/usr/bin/python -x',
+                                               executable=exe),
+                             '#!%s  -x\n' % exe)
+            self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
+            sys.stdout = sys.stderr = StringIO.StringIO()
+            self.assertEqual(get_script_header('#!/usr/bin/python',
+                                               executable=self.non_ascii_exe),
+                             '#!%s -x\n' % self.non_ascii_exe)
+            self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue())
+        finally:
+            del sys.modules["java"]
+            sys.platform = platform
+            sys.stdout = stdout
+
+
+
+
+class NamespaceTests(TestCase):
+
+    def setUp(self):
+        self._ns_pkgs = pkg_resources._namespace_packages.copy()
+        self._tmpdir = tempfile.mkdtemp(prefix="tests-distribute-")
+        os.makedirs(os.path.join(self._tmpdir, "site-pkgs"))
+        self._prev_sys_path = sys.path[:]
+        sys.path.append(os.path.join(self._tmpdir, "site-pkgs"))
+
+    def tearDown(self):
+        shutil.rmtree(self._tmpdir)
+        pkg_resources._namespace_packages = self._ns_pkgs.copy()
+        sys.path = self._prev_sys_path[:]
+
+    def _assertIn(self, member, container):
+        """ assertIn and assertTrue do not exist in Python 2.3"""
+        if member not in container:
+            standardMsg = '%s not found in %s' % (safe_repr(member),
+                                                  safe_repr(container))
+            self.fail(standardMsg)
+
+    def test_two_levels_deep(self):
+        """
+        Test nested namespace packages
+        Create namespace packages in the following tree :
+            site-packages-1/pkg1/pkg2
+            site-packages-2/pkg1/pkg2
+        Check both are in the _namespace_packages dict and that their __path__
+        is correct
+        """
+        sys.path.append(os.path.join(self._tmpdir, "site-pkgs2"))
+        os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"))
+        os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2"))
+        ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
+        for site in ["site-pkgs", "site-pkgs2"]:
+            pkg1_init = open(os.path.join(self._tmpdir, site,
+                             "pkg1", "__init__.py"), "w")
+            pkg1_init.write(ns_str)
+            pkg1_init.close()
+            pkg2_init = open(os.path.join(self._tmpdir, site,
+                             "pkg1", "pkg2", "__init__.py"), "w")
+            pkg2_init.write(ns_str)
+            pkg2_init.close()
+        import pkg1
+        self._assertIn("pkg1", pkg_resources._namespace_packages.keys())
+        try:
+            import pkg1.pkg2
+        except ImportError, e:
+            self.fail("Distribute tried to import the parent namespace package")
+        # check the _namespace_packages dict
+        self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys())
+        self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"])
+        # check the __path__ attribute contains both paths
+        self.assertEqual(pkg1.pkg2.__path__, [
+                os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"),
+                os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2") ])
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_sandbox.py b/vendor/distribute-0.6.35/setuptools/tests/test_sandbox.py
new file mode 100644
index 0000000000000000000000000000000000000000..1609ee861b3ae958d31fbd58c638d79deb5b1ff0
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_sandbox.py
@@ -0,0 +1,66 @@
+"""develop tests
+"""
+import sys
+import os
+import shutil
+import unittest
+import tempfile
+
+from setuptools.sandbox import DirectorySandbox, SandboxViolation
+
+def has_win32com():
+    """
+    Run this to determine if the local machine has win32com, and if it
+    does, include additional tests.
+    """
+    if not sys.platform.startswith('win32'):
+        return False
+    try:
+        mod = __import__('win32com')
+    except ImportError:
+        return False
+    return True
+
+class TestSandbox(unittest.TestCase):
+
+    def setUp(self):
+        self.dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        shutil.rmtree(self.dir)
+
+    def test_devnull(self):
+        if sys.version < '2.4':
+            return
+        sandbox = DirectorySandbox(self.dir)
+        sandbox.run(self._file_writer(os.devnull))
+
+    def _file_writer(path):
+        def do_write():
+            f = open(path, 'w')
+            f.write('xxx')
+            f.close()
+        return do_write
+
+    _file_writer = staticmethod(_file_writer)
+
+    if has_win32com():
+        def test_win32com(self):
+            """
+            win32com should not be prevented from caching COM interfaces
+            in gen_py.
+            """
+            import win32com
+            gen_py = win32com.__gen_path__
+            target = os.path.join(gen_py, 'test_write')
+            sandbox = DirectorySandbox(self.dir)
+            try:
+                try:
+                    sandbox.run(self._file_writer(target))
+                except SandboxViolation:
+                    self.fail("Could not create gen_py file due to SandboxViolation")
+            finally:
+                if os.path.exists(target): os.remove(target)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_sdist.py b/vendor/distribute-0.6.35/setuptools/tests/test_sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9d5d6e56c188608d74e2a44668712aa8dc4e102
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_sdist.py
@@ -0,0 +1,383 @@
+# -*- coding: utf-8 -*-
+"""sdist tests"""
+
+
+import os
+import shutil
+import sys
+import tempfile
+import unittest
+import urllib
+import unicodedata
+from StringIO import StringIO
+
+
+from setuptools.command.sdist import sdist
+from setuptools.command.egg_info import manifest_maker
+from setuptools.dist import Distribution
+
+
+SETUP_ATTRS = {
+    'name': 'sdist_test',
+    'version': '0.0',
+    'packages': ['sdist_test'],
+    'package_data': {'sdist_test': ['*.txt']}
+}
+
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(**%r)
+""" % SETUP_ATTRS
+
+
+if sys.version_info >= (3,):
+    LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
+else:
+    LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
+
+
+# Cannot use context manager because of Python 2.4
+def quiet():
+    global old_stdout, old_stderr
+    old_stdout, old_stderr = sys.stdout, sys.stderr
+    sys.stdout, sys.stderr = StringIO(), StringIO()
+
+def unquiet():
+    sys.stdout, sys.stderr = old_stdout, old_stderr
+
+
+# Fake byte literals for Python <= 2.5
+def b(s, encoding='utf-8'):
+    if sys.version_info >= (3,):
+        return s.encode(encoding)
+    return s
+
+
+# Convert to POSIX path
+def posix(path):
+    if sys.version_info >= (3,) and not isinstance(path, unicode):
+        return path.replace(os.sep.encode('ascii'), b('/'))
+    else:
+        return path.replace(os.sep, '/')
+
+
+# HFS Plus uses decomposed UTF-8
+def decompose(path):
+    if isinstance(path, unicode):
+        return unicodedata.normalize('NFD', path)
+    try:
+        path = path.decode('utf-8')
+        path = unicodedata.normalize('NFD', path)
+        path = path.encode('utf-8')
+    except UnicodeError:
+        pass # Not UTF-8
+    return path
+
+
+class TestSdistTest(unittest.TestCase):
+
+    def setUp(self):
+        self.temp_dir = tempfile.mkdtemp()
+        f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
+        f.write(SETUP_PY)
+        f.close()
+        # Set up the rest of the test package
+        test_pkg = os.path.join(self.temp_dir, 'sdist_test')
+        os.mkdir(test_pkg)
+        # *.rst was not included in package_data, so c.rst should not be
+        # automatically added to the manifest when not under version control
+        for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
+            # Just touch the files; their contents are irrelevant
+            open(os.path.join(test_pkg, fname), 'w').close()
+
+        self.old_cwd = os.getcwd()
+        os.chdir(self.temp_dir)
+
+    def tearDown(self):
+        os.chdir(self.old_cwd)
+        shutil.rmtree(self.temp_dir)
+
+    def test_package_data_in_sdist(self):
+        """Regression test for pull request #4: ensures that files listed in
+        package_data are included in the manifest even if they're not added to
+        version control.
+        """
+
+        dist = Distribution(SETUP_ATTRS)
+        dist.script_name = 'setup.py'
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+
+        # squelch output
+        quiet()
+        try:
+            cmd.run()
+        finally:
+            unquiet()
+
+        manifest = cmd.filelist.files
+        self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
+        self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
+        self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
+
+    def test_manifest_is_written_with_utf8_encoding(self):
+        # Test for #303.
+        dist = Distribution(SETUP_ATTRS)
+        dist.script_name = 'setup.py'
+        mm = manifest_maker(dist)
+        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+        os.mkdir('sdist_test.egg-info')
+
+        # UTF-8 filename
+        filename = os.path.join('sdist_test', 'smörbröd.py')
+
+        # Add UTF-8 filename and write manifest
+        quiet()
+        try:
+            mm.run()
+            mm.filelist.files.append(filename)
+            mm.write_manifest()
+        finally:
+            unquiet()
+
+        manifest = open(mm.manifest, 'rbU')
+        contents = manifest.read()
+        manifest.close()
+
+        # The manifest should be UTF-8 encoded
+        try:
+            u_contents = contents.decode('UTF-8')
+        except UnicodeDecodeError, e:
+            self.fail(e)
+
+        # The manifest should contain the UTF-8 filename
+        if sys.version_info >= (3,):
+            self.assertTrue(posix(filename) in u_contents)
+        else:
+            self.assertTrue(posix(filename) in contents)
+
+    # Python 3 only
+    if sys.version_info >= (3,):
+
+        def test_write_manifest_allows_utf8_filenames(self):
+            # Test for #303.
+            dist = Distribution(SETUP_ATTRS)
+            dist.script_name = 'setup.py'
+            mm = manifest_maker(dist)
+            mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+            os.mkdir('sdist_test.egg-info')
+
+            # UTF-8 filename
+            filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
+
+            # Add filename and write manifest
+            quiet()
+            try:
+                mm.run()
+                u_filename = filename.decode('utf-8')
+                mm.filelist.files.append(u_filename)
+                # Re-write manifest
+                mm.write_manifest()
+            finally:
+                unquiet()
+
+            manifest = open(mm.manifest, 'rbU')
+            contents = manifest.read()
+            manifest.close()
+
+            # The manifest should be UTF-8 encoded
+            try:
+                contents.decode('UTF-8')
+            except UnicodeDecodeError, e:
+                self.fail(e)
+
+            # The manifest should contain the UTF-8 filename
+            self.assertTrue(posix(filename) in contents)
+
+            # The filelist should have been updated as well
+            self.assertTrue(u_filename in mm.filelist.files)
+
+        def test_write_manifest_skips_non_utf8_filenames(self):
+            # Test for #303.
+            dist = Distribution(SETUP_ATTRS)
+            dist.script_name = 'setup.py'
+            mm = manifest_maker(dist)
+            mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+            os.mkdir('sdist_test.egg-info')
+
+            # Latin-1 filename
+            filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
+
+            # Add filename with surrogates and write manifest
+            quiet()
+            try:
+                mm.run()
+                u_filename = filename.decode('utf-8', 'surrogateescape')
+                mm.filelist.files.append(u_filename)
+                # Re-write manifest
+                mm.write_manifest()
+            finally:
+                unquiet()
+
+            manifest = open(mm.manifest, 'rbU')
+            contents = manifest.read()
+            manifest.close()
+
+            # The manifest should be UTF-8 encoded
+            try:
+                contents.decode('UTF-8')
+            except UnicodeDecodeError, e:
+                self.fail(e)
+
+            # The Latin-1 filename should have been skipped
+            self.assertFalse(posix(filename) in contents)
+
+            # The filelist should have been updated as well
+            self.assertFalse(u_filename in mm.filelist.files)
+
+    def test_manifest_is_read_with_utf8_encoding(self):
+        # Test for #303.
+        dist = Distribution(SETUP_ATTRS)
+        dist.script_name = 'setup.py'
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+
+        # Create manifest
+        quiet()
+        try:
+            cmd.run()
+        finally:
+            unquiet()
+
+        # Add UTF-8 filename to manifest
+        filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
+        cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+        manifest = open(cmd.manifest, 'ab')
+        manifest.write(b('\n')+filename)
+        manifest.close()
+
+        # The file must exist to be included in the filelist
+        open(filename, 'w').close()
+
+        # Re-read manifest
+        cmd.filelist.files = []
+        quiet()
+        try:
+            cmd.read_manifest()
+        finally:
+            unquiet()
+
+        # The filelist should contain the UTF-8 filename
+        if sys.version_info >= (3,):
+            filename = filename.decode('utf-8')
+        self.assertTrue(filename in cmd.filelist.files)
+
+    # Python 3 only
+    if sys.version_info >= (3,):
+
+        def test_read_manifest_skips_non_utf8_filenames(self):
+            # Test for #303.
+            dist = Distribution(SETUP_ATTRS)
+            dist.script_name = 'setup.py'
+            cmd = sdist(dist)
+            cmd.ensure_finalized()
+
+            # Create manifest
+            quiet()
+            try:
+                cmd.run()
+            finally:
+                unquiet()
+
+            # Add Latin-1 filename to manifest
+            filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
+            cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
+            manifest = open(cmd.manifest, 'ab')
+            manifest.write(b('\n')+filename)
+            manifest.close()
+
+            # The file must exist to be included in the filelist
+            open(filename, 'w').close()
+
+            # Re-read manifest
+            cmd.filelist.files = []
+            quiet()
+            try:
+                try:
+                    cmd.read_manifest()
+                except UnicodeDecodeError, e:
+                    self.fail(e)
+            finally:
+                unquiet()
+
+            # The Latin-1 filename should have been skipped
+            filename = filename.decode('latin-1')
+            self.assertFalse(filename in cmd.filelist.files)
+
+    def test_sdist_with_utf8_encoded_filename(self):
+        # Test for #303.
+        dist = Distribution(SETUP_ATTRS)
+        dist.script_name = 'setup.py'
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+
+        # UTF-8 filename
+        filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
+        open(filename, 'w').close()
+
+        quiet()
+        try:
+            cmd.run()
+        finally:
+            unquiet()
+
+        if sys.platform == 'darwin':
+            filename = decompose(filename)
+
+        if sys.version_info >= (3,):
+            if sys.platform == 'win32':
+                # Python 3 mangles the UTF-8 filename
+                filename = filename.decode('cp1252')
+                self.assertTrue(filename in cmd.filelist.files)
+            else:
+                filename = filename.decode('utf-8')
+                self.assertTrue(filename in cmd.filelist.files)
+        else:
+            self.assertTrue(filename in cmd.filelist.files)
+
+    def test_sdist_with_latin1_encoded_filename(self):
+        # Test for #303.
+        dist = Distribution(SETUP_ATTRS)
+        dist.script_name = 'setup.py'
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+
+        # Latin-1 filename
+        filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
+        open(filename, 'w').close()
+
+        quiet()
+        try:
+            cmd.run()
+        finally:
+            unquiet()
+
+        if sys.version_info >= (3,):
+            filename = filename.decode('latin-1')
+            if sys.platform == 'win32':
+                # Latin-1 is similar to Windows-1252
+                self.assertTrue(filename in cmd.filelist.files)
+            else:
+                # The Latin-1 filename should have been skipped
+                self.assertFalse(filename in cmd.filelist.files)
+        else:
+            # No conversion takes place under Python 2 and the file
+            # is included. We shall keep it that way for BBB.
+            self.assertTrue(filename in cmd.filelist.files)
+
+
+def test_suite():
+    return unittest.defaultTestLoader.loadTestsFromName(__name__)
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_test.py b/vendor/distribute-0.6.35/setuptools/tests/test_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad7cbd0f9695208923f12912621b42600503e78c
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_test.py
@@ -0,0 +1,124 @@
+# -*- coding: UTF-8 -*- 
+
+"""develop tests
+"""
+import sys
+import os, shutil, tempfile, unittest
+import site
+from StringIO import StringIO
+
+from distutils.errors import DistutilsError
+from setuptools.command.test import test
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo',
+    packages=['name', 'name.space', 'name.space.tests'],
+    namespace_packages=['name'],
+    test_suite='name.space.tests.test_suite',
+)
+"""
+
+NS_INIT = """# -*- coding: Latin-1 -*- 
+# Söme Arbiträry Ünicode to test Issüé 310
+try:
+    __import__('pkg_resources').declare_namespace(__name__)
+except ImportError:
+    from pkgutil import extend_path
+    __path__ = extend_path(__path__, __name__)
+"""
+# Make sure this is Latin-1 binary, before writing:
+if sys.version_info < (3,):
+    NS_INIT = NS_INIT.decode('UTF-8')
+NS_INIT = NS_INIT.encode('Latin-1')
+
+TEST_PY = """import unittest
+
+class TestTest(unittest.TestCase):
+    def test_test(self):
+        print "Foo" # Should fail under Python 3 unless 2to3 is used
+
+test_suite = unittest.makeSuite(TestTest)
+"""
+
+class TestTestTest(unittest.TestCase):
+
+    def setUp(self):
+        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+            return
+
+        # Directory structure
+        self.dir = tempfile.mkdtemp()
+        os.mkdir(os.path.join(self.dir, 'name'))
+        os.mkdir(os.path.join(self.dir, 'name', 'space'))
+        os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests'))
+        # setup.py
+        setup = os.path.join(self.dir, 'setup.py')
+        f = open(setup, 'wt')
+        f.write(SETUP_PY)
+        f.close()
+        self.old_cwd = os.getcwd()
+        # name/__init__.py
+        init = os.path.join(self.dir, 'name', '__init__.py')
+        f = open(init, 'wb')
+        f.write(NS_INIT)
+        f.close()
+        # name/space/__init__.py
+        init = os.path.join(self.dir, 'name', 'space', '__init__.py')
+        f = open(init, 'wt')
+        f.write('#empty\n')
+        f.close()
+        # name/space/tests/__init__.py
+        init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py')
+        f = open(init, 'wt')
+        f.write(TEST_PY)
+        f.close()
+        
+        os.chdir(self.dir)
+        self.old_base = site.USER_BASE
+        site.USER_BASE = tempfile.mkdtemp()
+        self.old_site = site.USER_SITE
+        site.USER_SITE = tempfile.mkdtemp()
+
+    def tearDown(self):
+        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+            return
+        
+        os.chdir(self.old_cwd)
+        shutil.rmtree(self.dir)
+        shutil.rmtree(site.USER_BASE)
+        shutil.rmtree(site.USER_SITE)
+        site.USER_BASE = self.old_base
+        site.USER_SITE = self.old_site
+
+    def test_test(self):
+        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
+            return
+        
+        dist = Distribution(dict(
+            name='foo',
+            packages=['name', 'name.space', 'name.space.tests'],
+            namespace_packages=['name'],
+            test_suite='name.space.tests.test_suite',
+            use_2to3=True,
+            ))
+        dist.script_name = 'setup.py'
+        cmd = test(dist)
+        cmd.user = 1
+        cmd.ensure_finalized()
+        cmd.install_dir = site.USER_SITE
+        cmd.user = 1
+        old_stdout = sys.stdout
+        sys.stdout = StringIO()
+        try:
+            try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements.
+                cmd.run()
+            except SystemExit: # The test runner calls sys.exit, stop that making an error.
+                pass
+        finally:
+            sys.stdout = old_stdout
+            
\ No newline at end of file
diff --git a/vendor/distribute-0.6.35/setuptools/tests/test_upload_docs.py b/vendor/distribute-0.6.35/setuptools/tests/test_upload_docs.py
new file mode 100644
index 0000000000000000000000000000000000000000..769f16cc5a0ca2175d350a99724a4660dc6b17d2
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/test_upload_docs.py
@@ -0,0 +1,72 @@
+"""build_ext tests
+"""
+import sys, os, shutil, tempfile, unittest, site, zipfile
+from setuptools.command.upload_docs import upload_docs
+from setuptools.dist import Distribution
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo')
+"""
+
+class TestUploadDocsTest(unittest.TestCase):
+    def setUp(self):
+        self.dir = tempfile.mkdtemp()
+        setup = os.path.join(self.dir, 'setup.py')
+        f = open(setup, 'w')
+        f.write(SETUP_PY)
+        f.close()
+        self.old_cwd = os.getcwd()
+        os.chdir(self.dir)
+
+        self.upload_dir = os.path.join(self.dir, 'build')
+        os.mkdir(self.upload_dir)
+
+        # A test document.
+        f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
+        f.write("Hello world.")
+        f.close()
+
+        # An empty folder.
+        os.mkdir(os.path.join(self.upload_dir, 'empty'))
+
+        if sys.version >= "2.6":
+            self.old_base = site.USER_BASE
+            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
+            self.old_site = site.USER_SITE
+            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
+
+    def tearDown(self):
+        os.chdir(self.old_cwd)
+        shutil.rmtree(self.dir)
+        if sys.version >= "2.6":
+            shutil.rmtree(site.USER_BASE)
+            shutil.rmtree(site.USER_SITE)
+            site.USER_BASE = self.old_base
+            site.USER_SITE = self.old_site
+
+    def test_create_zipfile(self):
+        # Test to make sure zipfile creation handles common cases.
+        # This explicitly includes a folder containing an empty folder.
+
+        dist = Distribution()
+
+        cmd = upload_docs(dist)
+        cmd.upload_dir = self.upload_dir
+        cmd.target_dir = self.upload_dir
+        tmp_dir = tempfile.mkdtemp()
+        tmp_file = os.path.join(tmp_dir, 'foo.zip')
+        try:
+            zip_file = cmd.create_zipfile(tmp_file)
+
+            assert zipfile.is_zipfile(tmp_file)
+
+            zip_file = zipfile.ZipFile(tmp_file)  # re-open to inspect the archive contents
+
+            assert zip_file.namelist() == ['index.html']
+
+            zip_file.close()
+        finally:
+            shutil.rmtree(tmp_dir)
+
diff --git a/vendor/distribute-0.6.35/setuptools/tests/win_script_wrapper.txt b/vendor/distribute-0.6.35/setuptools/tests/win_script_wrapper.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9f7c81d6b7d34ddda9111567db2a65ddeda0a745
--- /dev/null
+++ b/vendor/distribute-0.6.35/setuptools/tests/win_script_wrapper.txt
@@ -0,0 +1,151 @@
+Python Script Wrapper for Windows
+=================================
+
+setuptools includes wrappers for Python scripts that allow them to be
+executed like regular Windows programs.  There are two wrappers, one
+for command-line programs, cli.exe, and one for graphical programs,
+gui.exe.  These programs are almost identical, function in much the
+same way, and are generated from the same source file.  A wrapper
+program is used by copying it to the directory containing the script
+it is to wrap, giving it the same name as that script.  In the rest
+of this document, we'll give an example that illustrates this.
+
+Let's create a simple script, foo-script.py:
+
+    >>> import os, sys, tempfile
+    >>> from setuptools.command.easy_install import nt_quote_arg
+    >>> sample_directory = tempfile.mkdtemp()
+    >>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w')
+    >>> f.write(
+    ... """#!%(python_exe)s
+    ... import sys
+    ... input = repr(sys.stdin.read())
+    ... print sys.argv[0][-14:]
+    ... print sys.argv[1:]
+    ... print input
+    ... if __debug__:
+    ...     print 'non-optimized'
+    ... """ % dict(python_exe=nt_quote_arg(sys.executable)))
+    >>> f.close()
+
+Note that the script starts with a Unix-style '#!' line saying which
+Python executable to run.  The wrapper will use this to find the
+correct Python executable.
+
+We'll also copy cli.exe to the sample-directory with the name foo.exe:
+
+    >>> import pkg_resources
+    >>> f = open(os.path.join(sample_directory, 'foo.exe'), 'wb')
+    >>> f.write(
+    ...     pkg_resources.resource_string('setuptools', 'cli.exe')
+    ...     )
+    >>> f.close()
+
+When the copy of cli.exe, foo.exe in this example, runs, it examines
+the path name it was run with and computes a Python script path name
+by removing the '.exe' suffix and adding the '-script.py' suffix. (For
+GUI programs, the suffix '-script.pyw' is added.)  This is why we
+named our script the way we did.  Now we can run our script by running
+the wrapper:
+
+    >>> import os
+    >>> input, output = os.popen4('"'+nt_quote_arg(os.path.join(sample_directory, 'foo.exe'))
+    ...               + r' arg1 "arg 2" "arg \"2\\\"" "arg 4\\" "arg5 a\\b"')
+    >>> input.write('hello\nworld\n')
+    >>> input.close()
+    >>> print output.read(),
+    \foo-script.py
+    ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
+    'hello\nworld\n'
+    non-optimized
+
+This example was a little pathological in that it exercised Windows
+(MS C runtime) quoting rules (see the short sketch after this list):
+
+- Strings containing spaces are surrounded by double quotes.
+
+- Double quotes in strings need to be escaped by preceding them with
+  backslashes.
+
+- One or more backslashes preceding a double quote need to be escaped
+  by preceding each of them with a backslash.
+
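+These are the same rules that the standard library's
+``subprocess.list2cmdline`` implements; as a small illustrative aside
+(not part of the wrapper machinery itself), it can be used to see how
+an argument list would be quoted:
+
+    >>> import subprocess
+    >>> print subprocess.list2cmdline(['arg1', 'arg 2'])
+    arg1 "arg 2"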
+
+Specifying Python Command-line Options
+--------------------------------------
+
+You can specify a single argument on the '#!' line.  This can be used
+to specify Python options like -O, to run in optimized mode, or -i,
+to start the interactive interpreter.  You can combine multiple
+options as usual.  For example, to run in optimized mode and
+enter the interpreter after running the script, you could use -Oi:
+
+    >>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w')
+    >>> f.write(
+    ... """#!%(python_exe)s  -Oi  
+    ... import sys
+    ... input = repr(sys.stdin.read())
+    ... print sys.argv[0][-14:]
+    ... print sys.argv[1:]
+    ... print input
+    ... if __debug__:
+    ...     print 'non-optimized'
+    ... sys.ps1 = '---'
+    ... """ % dict(python_exe=nt_quote_arg(sys.executable)))
+    >>> f.close()
+
+    >>> input, output = os.popen4(nt_quote_arg(os.path.join(sample_directory, 'foo.exe')))
+    >>> input.close()
+    >>> print output.read(),
+    \foo-script.py
+    []
+    ''
+    ---
+
+Testing the GUI Version
+-----------------------
+
+Now let's test the GUI version with a simple script, bar-script.pyw:
+
+    >>> import os, sys, tempfile
+    >>> from setuptools.command.easy_install import nt_quote_arg
+    >>> sample_directory = tempfile.mkdtemp()
+    >>> f = open(os.path.join(sample_directory, 'bar-script.pyw'), 'w')
+    >>> f.write(
+    ... """#!%(python_exe)s
+    ... import sys
+    ... f = open(sys.argv[1], 'wb')
+    ... f.write(repr(sys.argv[2]))
+    ... f.close()
+    ... """ % dict(python_exe=nt_quote_arg(sys.executable)))
+    >>> f.close()
+
+We'll also copy gui.exe to the sample-directory with the name bar.exe:
+
+    >>> import pkg_resources
+    >>> f = open(os.path.join(sample_directory, 'bar.exe'), 'wb')
+    >>> f.write(
+    ...     pkg_resources.resource_string('setuptools', 'gui.exe')
+    ...     )
+    >>> f.close()
+
+Finally, we'll run the script and check the result:
+
+    >>> import os
+    >>> input, output = os.popen4('"'+nt_quote_arg(os.path.join(sample_directory, 'bar.exe'))
+    ...               + r' "%s" "Test Argument"' % os.path.join(sample_directory, 'test_output.txt'))
+    >>> input.close()
+    >>> print output.read()
+    <BLANKLINE>
+    >>> f = open(os.path.join(sample_directory, 'test_output.txt'), 'rb')
+    >>> print f.read()
+    'Test Argument'
+    >>> f.close()
+
+
+We're done with the sample_directory:
+
+    >>> import shutil
+    >>> shutil.rmtree(sample_directory)
+
diff --git a/vendor/distribute-0.6.35/site.py b/vendor/distribute-0.6.35/site.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7166f1407adc86169b0c13aef44983e0c07d30c
--- /dev/null
+++ b/vendor/distribute-0.6.35/site.py
@@ -0,0 +1,83 @@
+def __boot():
+    import sys, os, os.path
+    PYTHONPATH = os.environ.get('PYTHONPATH')
+    if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
+        PYTHONPATH = []
+    else:
+        PYTHONPATH = PYTHONPATH.split(os.pathsep)
+
+    pic = getattr(sys,'path_importer_cache',{})
+    stdpath = sys.path[len(PYTHONPATH):]
+    mydir = os.path.dirname(__file__)
+    #print "searching",stdpath,sys.path
+
+    for item in stdpath:
+        if item==mydir or not item:
+            continue    # skip if current dir. on Windows, or my own directory
+        importer = pic.get(item)
+        if importer is not None:
+            loader = importer.find_module('site')
+            if loader is not None:
+                # This should actually reload the current module
+                loader.load_module('site')
+                break
+        else:
+            try:
+                import imp # Avoid import loop in Python >= 3.3
+                stream, path, descr = imp.find_module('site',[item])
+            except ImportError:
+                continue
+            if stream is None:
+                continue
+            try:
+                # This should actually reload the current module
+                imp.load_module('site',stream,path,descr)
+            finally:
+                stream.close()
+            break
+    else:
+        raise ImportError("Couldn't find the real 'site' module")
+
+    #print "loaded", __file__
+
+    known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
+
+    oldpos = getattr(sys,'__egginsert',0)   # save old insertion position
+    sys.__egginsert = 0                     # and reset the current one
+
+    for item in PYTHONPATH:
+        addsitedir(item)
+
+    sys.__egginsert += oldpos           # restore effective old position
+    
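+    # Rearrange sys.path so that entries newly appended by addsitedir()
+    # (e.g. eggs referenced from .pth files) end up just before the first
+    # "system" path entry instead of at the end, preserving their order.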
+    d,nd = makepath(stdpath[0])
+    insert_at = None
+    new_path = []
+
+    for item in sys.path:
+        p,np = makepath(item)
+
+        if np==nd and insert_at is None:
+            # We've hit the first 'system' path entry, so added entries go here
+            insert_at = len(new_path)
+
+        if np in known_paths or insert_at is None:
+            new_path.append(item)
+        else:
+            # new path after the insert point, back-insert it
+            new_path.insert(insert_at, item)
+            insert_at += 1
+            
+    sys.path[:] = new_path
+
+if __name__=='site':    
+    __boot()
+    del __boot
+    
+
+
+
+
+
+
+
diff --git a/vendor/distribute-0.6.35/tests/api_tests.txt b/vendor/distribute-0.6.35/tests/api_tests.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6cf6e66f27c2252d52812a7a62466a66371fe4a3
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/api_tests.txt
@@ -0,0 +1,330 @@
+Pluggable Distributions of Python Software
+==========================================
+
+Distributions
+-------------
+
+A "Distribution" is a collection of files that represent a "Release" of a
+"Project" as of a particular point in time, denoted by a
+"Version"::
+
+    >>> import sys, pkg_resources
+    >>> from pkg_resources import Distribution
+    >>> Distribution(project_name="Foo", version="1.2")
+    Foo 1.2
+
+Distributions have a location, which can be a filename, URL, or really anything
+else you care to use::
+
+    >>> dist = Distribution(
+    ...     location="http://example.com/something",
+    ...     project_name="Bar", version="0.9"
+    ... )
+
+    >>> dist
+    Bar 0.9 (http://example.com/something)
+
+
+Distributions have various introspectable attributes::
+
+    >>> dist.location
+    'http://example.com/something'
+
+    >>> dist.project_name
+    'Bar'
+
+    >>> dist.version
+    '0.9'
+
+    >>> dist.py_version == sys.version[:3]
+    True
+
+    >>> print dist.platform
+    None
+
+Including various computed attributes::
+
+    >>> from pkg_resources import parse_version
+    >>> dist.parsed_version == parse_version(dist.version)
+    True
+
+    >>> dist.key    # case-insensitive form of the project name
+    'bar'
+
+Distributions are compared (and hashed) by version first::
+
+    >>> Distribution(version='1.0') == Distribution(version='1.0')
+    True
+    >>> Distribution(version='1.0') == Distribution(version='1.1')
+    False
+    >>> Distribution(version='1.0') <  Distribution(version='1.1')
+    True
+
+but also by project name (case-insensitive), platform, Python version,
+location, etc.::
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="Foo",version="1.0")
+    True
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="foo",version="1.0")
+    True
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="Foo",version="1.1")
+    False
+
+    >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
+    ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
+    False
+
+    >>> Distribution(location="spam",version="1.0") == \
+    ... Distribution(location="spam",version="1.0")
+    True
+
+    >>> Distribution(location="spam",version="1.0") == \
+    ... Distribution(location="baz",version="1.0")
+    False
+
+
+
+Hash and compare distribution by prio/plat
+
+Get version from metadata
+provider capabilities
+egg_name()
+as_requirement()
+from_location, from_filename (w/path normalization)
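+
+As a quick illustration of ``as_requirement()`` from the notes above
+(a small sketch added here for clarity)::
+
+    >>> dist.as_requirement()
+    Requirement.parse('Bar==0.9')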
+
+Releases may have zero or more "Requirements", which indicate
+what releases of another project the release requires in order to
+function.  A Requirement names the other project, expresses some criteria
+as to what releases of that project are acceptable, and lists any "Extras"
+that the requiring release may need from that project.  (An Extra is an
+optional feature of a Release that can only be used if its additional
+Requirements are satisfied.)
+
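+For example, a Requirement can be parsed from a string naming a
+project, a version criterion, and any extras (a brief sketch; the
+"Bar" project and "baz" extra here are purely illustrative)::
+
+    >>> from pkg_resources import Requirement
+    >>> req = Requirement.parse("Bar[baz]>=0.9")
+    >>> req.project_name
+    'Bar'
+    >>> req.extras
+    ('baz',)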
+
+
+The Working Set
+---------------
+
+A collection of active distributions is called a Working Set.  Note that a
+Working Set can contain any importable distribution, not just pluggable ones.
+For example, the Python standard library is an importable distribution that
+will usually be part of the Working Set, even though it is not pluggable.
+Similarly, when you are doing development work on a project, the files you are
+editing are also a Distribution.  (And, with a little attention to the
+directory names used and the inclusion of some additional metadata,
+such a "development distribution" can be made pluggable as well.)
+
+    >>> from pkg_resources import WorkingSet, VersionConflict
+
+A working set's entries are the sys.path entries that correspond to the active
+distributions.  By default, the working set's entries are the items on
+``sys.path``::
+
+    >>> ws = WorkingSet()
+    >>> ws.entries == sys.path
+    True
+
+But you can also create an empty working set explicitly, and add distributions
+to it::
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(dist)
+    >>> ws.entries
+    ['http://example.com/something']
+    >>> dist in ws
+    True
+    >>> Distribution('foo',version="") in ws
+    False
+
+And you can iterate over its distributions::
+
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+Adding the same distribution more than once is a no-op::
+
+    >>> ws.add(dist)
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+For that matter, adding multiple distributions for the same project also does
+nothing, because a working set can only hold one active distribution per
+project -- the first one added to it::
+
+    >>> ws.add(
+    ...     Distribution(
+    ...         'http://example.com/something', project_name="Bar",
+    ...         version="7.2"
+    ...     )
+    ... )
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+You can append a path entry to a working set using ``add_entry()``::
+
+    >>> ws.entries
+    ['http://example.com/something']
+    >>> ws.add_entry(pkg_resources.__file__)
+    >>> ws.entries == ['http://example.com/something', pkg_resources.__file__]
+    True
+
+Multiple additions result in multiple entries, even if the entry is already in
+the working set (because ``sys.path`` can contain the same entry more than
+once)::
+
+    >>> ws.add_entry(pkg_resources.__file__)
+    >>> ws.entries
+    ['...example.com...', '...pkg_resources...', '...pkg_resources...']
+
+And you can specify the path entry a distribution was found under, using the
+optional second parameter to ``add()``::
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(dist,"foo")
+    >>> ws.entries
+    ['foo']
+
+But even if a distribution is found under multiple path entries, it still only
+shows up once when iterating the working set::
+
+    >>> ws.add_entry(ws.entries[0])
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
+
+    >>> from pkg_resources import Requirement
+    >>> print ws.find(Requirement.parse("Foo==1.0"))    # no match, return None
+    None
+
+    >>> ws.find(Requirement.parse("Bar==0.9"))  # match, return distribution
+    Bar 0.9 (http://example.com/something)
+
+Note that asking for a conflicting version of a distribution already in a
+working set triggers a ``pkg_resources.VersionConflict`` error::
+
+    >>> try:
+    ...     ws.find(Requirement.parse("Bar==1.0"))
+    ... except VersionConflict:
+    ...     print 'ok'
+    ok
+
+You can subscribe a callback function to receive notifications whenever a new
+distribution is added to a working set.  The callback is immediately invoked
+once for each existing distribution in the working set, and then is called
+again for new distributions added thereafter::
+
+    >>> def added(dist): print "Added", dist
+    >>> ws.subscribe(added)
+    Added Bar 0.9
+    >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12") 
+    >>> ws.add(foo12)
+    Added Foo 1.2
+
+Note, however, that only the first distribution added for a given project name
+will trigger a callback, even during the initial ``subscribe()`` callback::
+
+    >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14") 
+    >>> ws.add(foo14)   # no callback, because Foo 1.2 is already active
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(foo12)
+    >>> ws.add(foo14)
+    >>> ws.subscribe(added)
+    Added Foo 1.2
+    
+And adding a callback more than once has no effect, either::
+
+    >>> ws.subscribe(added)     # no callbacks
+
+    # and no double-callbacks on subsequent additions, either
+    >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
+    >>> ws.add(just_a_test)
+    Added JustATest 0.99
+
+
+Finding Plugins
+---------------
+
+``WorkingSet`` objects can be used to figure out what plugins in an
+``Environment`` can be loaded without any resolution errors::
+
+    >>> from pkg_resources import Environment
+
+    >>> plugins = Environment([])   # normally, a list of plugin directories
+    >>> plugins.add(foo12)
+    >>> plugins.add(foo14)
+    >>> plugins.add(just_a_test)
+    
+In the simplest case, we just get the newest version of each distribution in
+the plugin environment::
+
+    >>> ws = WorkingSet([])
+    >>> ws.find_plugins(plugins)
+    ([JustATest 0.99, Foo 1.4 (f14)], {})
+
+But if there's a problem with a version conflict or missing requirements, the
+method falls back to older versions, and the error info dict will contain an
+exception instance for each unloadable plugin::
+
+    >>> ws.add(foo12)   # this will conflict with Foo 1.4
+    >>> ws.find_plugins(plugins)
+    ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
+
+But if you disallow fallbacks, the failed plugin will be skipped instead of
+trying older versions::
+
+    >>> ws.find_plugins(plugins, fallback=False)
+    ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
+
+
+
+Platform Compatibility Rules
+----------------------------
+
+On the Mac, there are potential compatibility issues for modules compiled
+on newer versions of Mac OS X than what the user is running. Additionally,
+Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
+
+Basic equality works as on other platforms::
+
+    >>> from pkg_resources import compatible_platforms as cp
+    >>> reqd = 'macosx-10.4-ppc'
+    >>> cp(reqd, reqd)
+    True
+    >>> cp("win32", reqd)
+    False
+
+Distributions made on other machine types are not compatible::
+
+    >>> cp("macosx-10.4-i386", reqd)
+    False
+
+Distributions made on earlier versions of the OS are compatible, as
+long as they are from the same top-level version. The patchlevel version
+number does not matter::
+
+    >>> cp("macosx-10.4-ppc", reqd)
+    True
+    >>> cp("macosx-10.3-ppc", reqd)
+    True
+    >>> cp("macosx-10.5-ppc", reqd)
+    False
+    >>> cp("macosx-9.5-ppc", reqd)
+    False
+
+Backwards compatibility for packages made via earlier versions of 
+setuptools is provided as well::
+
+    >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
+    True
+    >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
+    True
+    >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
+    False
+
diff --git a/vendor/distribute-0.6.35/tests/install_test.py b/vendor/distribute-0.6.35/tests/install_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..02deb81860c8d0c218157c79d3f357b73b671e77
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/install_test.py
@@ -0,0 +1,75 @@
+import urllib2
+import sys
+import os
+
+if os.path.exists('distribute_setup.py'):
+    print 'distribute_setup.py exists in the current dir, aborting'
+    sys.exit(2)
+
+print '**** Starting Test'
+print '\n\n'
+
+is_jython = sys.platform.startswith('java')
+if is_jython:
+    import subprocess
+
+print 'Downloading bootstrap'
+file = urllib2.urlopen('http://nightly.ziade.org/distribute_setup.py')
+f = open('distribute_setup.py', 'w')
+f.write(file.read())
+f.close()
+
+# running it
+args = [sys.executable]  + ['distribute_setup.py']
+if is_jython:
+    res = subprocess.call(args)
+else:
+    res = os.spawnv(os.P_WAIT, sys.executable, args)
+
+if res != 0:
+    print '**** Test failed, please send me the output at tarek@ziade.org'
+    os.remove('distribute_setup.py')
+    sys.exit(2)
+
+# now checking if Distribute is installed
+script = """\
+import sys
+try:
+    import setuptools
+except ImportError:
+    sys.exit(0)
+
+sys.exit(hasattr(setuptools, "_distribute"))
+"""
+
+root = 'script'
+seed = 0
+script_name = '%s%d.py' % (root, seed)
+
+while os.path.exists(script_name):
+    seed += 1
+    script_name = '%s%d.py' % (root, seed)
+
+f = open(script_name, 'w')
+try:
+    f.write(script)
+finally:
+    f.close()
+
+try:
+    args = [sys.executable]  + [script_name]
+    if is_jython:
+        res = subprocess.call(args)
+    else:
+        res = os.spawnv(os.P_WAIT, sys.executable, args)
+
+    print '\n\n'
+    if res:
+        print '**** Test is OK'
+    else:
+        print '**** Test failed, please send me the output at tarek@ziade.org'
+finally:
+    if os.path.exists(script_name):
+        os.remove(script_name)
+    os.remove('distribute_setup.py')
+
diff --git a/vendor/distribute-0.6.35/tests/manual_test.py b/vendor/distribute-0.6.35/tests/manual_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d5051f1656e7a451a6da7f6a4f5503d03e1b81f
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/manual_test.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+import sys
+
+if sys.version_info[0] >= 3:
+    raise NotImplementedError('Py3 not supported in this test yet')
+
+import os
+import shutil
+import tempfile
+from distutils.command.install import INSTALL_SCHEMES
+from string import Template
+from urllib2 import urlopen
+
+try:
+    import subprocess
+    def _system_call(*args):
+        assert subprocess.call(args) == 0
+except ImportError:
+    # Python 2.3
+    def _system_call(*args):
+        # quoting arguments if windows
+        if sys.platform == 'win32':
+            def quote(arg):
+                if ' ' in arg:
+                    return '"%s"' % arg
+                return arg
+            args = [quote(arg) for arg in args]
+        assert os.system(' '.join(args)) == 0
+
+def tempdir(func):
+    def _tempdir(*args, **kwargs):
+        test_dir = tempfile.mkdtemp()
+        old_dir = os.getcwd()
+        os.chdir(test_dir)
+        try:
+            return func(*args, **kwargs)
+        finally:
+            os.chdir(old_dir)
+            shutil.rmtree(test_dir)
+    return _tempdir
+
+SIMPLE_BUILDOUT = """\
+[buildout]
+
+parts = eggs
+
+[eggs]
+recipe = zc.recipe.egg
+
+eggs =
+    extensions
+"""
+
+BOOTSTRAP = 'http://python-distribute.org/bootstrap.py'
+PYVER = sys.version.split()[0][:3]
+DEV_URL = 'http://bitbucket.org/tarek/distribute/get/0.6-maintenance.zip#egg=distribute-dev'
+
+_VARS = {'base': '.',
+         'py_version_short': PYVER}
+
+if sys.platform == 'win32':
+    PURELIB = INSTALL_SCHEMES['nt']['purelib']
+else:
+    PURELIB = INSTALL_SCHEMES['unix_prefix']['purelib']
+
+
+@tempdir
+def test_virtualenv():
+    """virtualenv with distribute"""
+    purelib = os.path.abspath(Template(PURELIB).substitute(**_VARS))
+    _system_call('virtualenv', '--no-site-packages', '.', '--distribute')
+    _system_call('bin/easy_install', 'distribute==dev')
+    # linux specific
+    site_pkg = os.listdir(purelib)
+    site_pkg.sort()
+    assert 'distribute' in site_pkg[0]
+    easy_install = os.path.join(purelib, 'easy-install.pth')
+    with open(easy_install) as f:
+        res = f.read()
+    assert 'distribute' in res
+    assert 'setuptools' not in res
+
+@tempdir
+def test_full():
+    """virtualenv + pip + buildout"""
+    _system_call('virtualenv', '--no-site-packages', '.')
+    _system_call('bin/easy_install', '-q', 'distribute==dev')
+    _system_call('bin/easy_install', '-qU', 'distribute==dev')
+    _system_call('bin/easy_install', '-q', 'pip')
+    _system_call('bin/pip', 'install', '-q', 'zc.buildout')
+
+    with open('buildout.cfg', 'w') as f:
+        f.write(SIMPLE_BUILDOUT)
+
+    with open('bootstrap.py', 'w') as f:
+        f.write(urlopen(BOOTSTRAP).read())
+
+    _system_call('bin/python', 'bootstrap.py', '--distribute')
+    _system_call('bin/buildout', '-q')
+    eggs = os.listdir('eggs')
+    eggs.sort()
+    assert len(eggs) == 3
+    assert eggs[0].startswith('distribute')
+    assert eggs[1:] == ['extensions-0.3-py2.6.egg',
+                        'zc.recipe.egg-1.2.2-py2.6.egg']
+
+if __name__ == '__main__':
+    test_virtualenv()
+    test_full()
+
diff --git a/vendor/distribute-0.6.35/tests/shlib_test/hello.c b/vendor/distribute-0.6.35/tests/shlib_test/hello.c
new file mode 100644
index 0000000000000000000000000000000000000000..9998372ccd4bdd2b0a9e4dc8b9af0ec3a319e423
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/shlib_test/hello.c
@@ -0,0 +1,168 @@
+/* Generated by Pyrex 0.9.3 on Thu Jan 05 17:47:12 2006 */
+
+#include "Python.h"
+#include "structmember.h"
+#ifndef PY_LONG_LONG
+  #define PY_LONG_LONG LONG_LONG
+#endif
+
+
+typedef struct {PyObject **p; char *s;} __Pyx_InternTabEntry; /*proto*/
+typedef struct {PyObject **p; char *s; long n;} __Pyx_StringTabEntry; /*proto*/
+static PyObject *__Pyx_UnpackItem(PyObject *, int); /*proto*/
+static int __Pyx_EndUnpack(PyObject *, int); /*proto*/
+static int __Pyx_PrintItem(PyObject *); /*proto*/
+static int __Pyx_PrintNewline(void); /*proto*/
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static void __Pyx_ReRaise(void); /*proto*/
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/
+static PyObject *__Pyx_GetExcValue(void); /*proto*/
+static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name); /*proto*/
+static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/
+static int __Pyx_GetStarArgs(PyObject **args, PyObject **kwds, char *kwd_list[], int nargs, PyObject **args2, PyObject **kwds2); /*proto*/
+static void __Pyx_WriteUnraisable(char *name); /*proto*/
+static void __Pyx_AddTraceback(char *funcname); /*proto*/
+static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size);  /*proto*/
+static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
+static int __Pyx_GetVtable(PyObject *dict, void *vtabptr); /*proto*/
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, char *modname); /*proto*/
+static int __Pyx_InternStrings(__Pyx_InternTabEntry *t); /*proto*/
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
+
+static PyObject *__pyx_m;
+static PyObject *__pyx_b;
+static int __pyx_lineno;
+static char *__pyx_filename;
+staticforward char **__pyx_f;
+
+/* Declarations from hello */
+
+char (*(get_hello_msg(void))); /*proto*/
+
+/* Implementation of hello */
+
+static PyObject *__pyx_n_hello;
+
+static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  PyObject *__pyx_r;
+  PyObject *__pyx_1 = 0;
+  static char *__pyx_argnames[] = {0};
+  if (!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "", __pyx_argnames)) return 0;
+
+  /* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":4 */
+  __pyx_1 = PyString_FromString(get_hello_msg()); if (!__pyx_1) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; goto __pyx_L1;}
+  __pyx_r = __pyx_1;
+  __pyx_1 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; Py_INCREF(__pyx_r);
+  goto __pyx_L0;
+  __pyx_L1:;
+  Py_XDECREF(__pyx_1);
+  __Pyx_AddTraceback("hello.hello");
+  __pyx_r = 0;
+  __pyx_L0:;
+  return __pyx_r;
+}
+
+static __Pyx_InternTabEntry __pyx_intern_tab[] = {
+  {&__pyx_n_hello, "hello"},
+  {0, 0}
+};
+
+static struct PyMethodDef __pyx_methods[] = {
+  {"hello", (PyCFunction)__pyx_f_5hello_hello, METH_VARARGS|METH_KEYWORDS, 0},
+  {0, 0, 0, 0}
+};
+
+DL_EXPORT(void) inithello(void); /*proto*/
+DL_EXPORT(void) inithello(void) {
+  __pyx_m = Py_InitModule4("hello", __pyx_methods, 0, 0, PYTHON_API_VERSION);
+  if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
+  __pyx_b = PyImport_AddModule("__builtin__");
+  if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
+  if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
+  if (__Pyx_InternStrings(__pyx_intern_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;};
+
+  /* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":3 */
+  return;
+  __pyx_L1:;
+  __Pyx_AddTraceback("hello");
+}
+
+static char *__pyx_filenames[] = {
+  "hello.pyx",
+};
+statichere char **__pyx_f = __pyx_filenames;
+
+/* Runtime support code */
+
+static int __Pyx_InternStrings(__Pyx_InternTabEntry *t) {
+    while (t->p) {
+        *t->p = PyString_InternFromString(t->s);
+        if (!*t->p)
+            return -1;
+        ++t;
+    }
+    return 0;
+}
+
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+
+static void __Pyx_AddTraceback(char *funcname) {
+    PyObject *py_srcfile = 0;
+    PyObject *py_funcname = 0;
+    PyObject *py_globals = 0;
+    PyObject *empty_tuple = 0;
+    PyObject *empty_string = 0;
+    PyCodeObject *py_code = 0;
+    PyFrameObject *py_frame = 0;
+    
+    py_srcfile = PyString_FromString(__pyx_filename);
+    if (!py_srcfile) goto bad;
+    py_funcname = PyString_FromString(funcname);
+    if (!py_funcname) goto bad;
+    py_globals = PyModule_GetDict(__pyx_m);
+    if (!py_globals) goto bad;
+    empty_tuple = PyTuple_New(0);
+    if (!empty_tuple) goto bad;
+    empty_string = PyString_FromString("");
+    if (!empty_string) goto bad;
+    py_code = PyCode_New(
+        0,            /*int argcount,*/
+        0,            /*int nlocals,*/
+        0,            /*int stacksize,*/
+        0,            /*int flags,*/
+        empty_string, /*PyObject *code,*/
+        empty_tuple,  /*PyObject *consts,*/
+        empty_tuple,  /*PyObject *names,*/
+        empty_tuple,  /*PyObject *varnames,*/
+        empty_tuple,  /*PyObject *freevars,*/
+        empty_tuple,  /*PyObject *cellvars,*/
+        py_srcfile,   /*PyObject *filename,*/
+        py_funcname,  /*PyObject *name,*/
+        __pyx_lineno,   /*int firstlineno,*/
+        empty_string  /*PyObject *lnotab*/
+    );
+    if (!py_code) goto bad;
+    py_frame = PyFrame_New(
+        PyThreadState_Get(), /*PyThreadState *tstate,*/
+        py_code,             /*PyCodeObject *code,*/
+        py_globals,          /*PyObject *globals,*/
+        0                    /*PyObject *locals*/
+    );
+    if (!py_frame) goto bad;
+    py_frame->f_lineno = __pyx_lineno;
+    PyTraceBack_Here(py_frame);
+bad:
+    Py_XDECREF(py_srcfile);
+    Py_XDECREF(py_funcname);
+    Py_XDECREF(empty_tuple);
+    Py_XDECREF(empty_string);
+    Py_XDECREF(py_code);
+    Py_XDECREF(py_frame);
+}
diff --git a/vendor/distribute-0.6.35/tests/shlib_test/hello.pyx b/vendor/distribute-0.6.35/tests/shlib_test/hello.pyx
new file mode 100644
index 0000000000000000000000000000000000000000..58ce6919a2b6e12ae30b6f238b9fb4b50c56a517
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/shlib_test/hello.pyx
@@ -0,0 +1,4 @@
+cdef extern char *get_hello_msg()
+
+def hello():
+    return get_hello_msg()
diff --git a/vendor/distribute-0.6.35/tests/shlib_test/hellolib.c b/vendor/distribute-0.6.35/tests/shlib_test/hellolib.c
new file mode 100644
index 0000000000000000000000000000000000000000..88d65cee923688fbd2293aa1d0f62df13febf7b3
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/shlib_test/hellolib.c
@@ -0,0 +1,3 @@
+extern char* get_hello_msg() {
+    return "Hello, world!";
+}
diff --git a/vendor/distribute-0.6.35/tests/shlib_test/setup.py b/vendor/distribute-0.6.35/tests/shlib_test/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0c93996f3265bdef0eb5b746de6ca1e96052a90
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/shlib_test/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup, Extension, Library
+
+setup(
+    name="shlib_test",
+    ext_modules = [
+        Library("hellolib", ["hellolib.c"]),
+        Extension("hello", ["hello.pyx"], libraries=["hellolib"])
+    ],
+    test_suite="test_hello.HelloWorldTest",
+)
diff --git a/vendor/distribute-0.6.35/tests/shlib_test/test_hello.py b/vendor/distribute-0.6.35/tests/shlib_test/test_hello.py
new file mode 100644
index 0000000000000000000000000000000000000000..6da02e31d450ea669727d47e34c1d624cb97ce2b
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/shlib_test/test_hello.py
@@ -0,0 +1,7 @@
+from unittest import TestCase
+
+class HelloWorldTest(TestCase):
+    def testHelloMsg(self):
+        from hello import hello
+        self.assertEqual(hello(), "Hello, world!")
+
diff --git a/vendor/distribute-0.6.35/tests/test_distribute_setup.py b/vendor/distribute-0.6.35/tests/test_distribute_setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f3da058d14bea737ec5776841d2ce43b1344c4e
--- /dev/null
+++ b/vendor/distribute-0.6.35/tests/test_distribute_setup.py
@@ -0,0 +1,73 @@
+import sys
+import os
+import tempfile
+import unittest
+import shutil
+import copy
+
+CURDIR = os.path.abspath(os.path.dirname(__file__))
+TOPDIR = os.path.split(CURDIR)[0]
+sys.path.insert(0, TOPDIR)
+
+from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
+                              _do_download, _install, DEFAULT_URL,
+                              DEFAULT_VERSION)
+import distribute_setup
+
+class TestSetup(unittest.TestCase):
+
+    def urlopen(self, url):
+        return open(self.tarball)
+
+    def setUp(self):
+        self.old_sys_path = copy.copy(sys.path)
+        self.cwd = os.getcwd()
+        self.tmpdir = tempfile.mkdtemp()
+        os.chdir(TOPDIR)
+        _python_cmd("setup.py", "-q", "egg_info", "-RDb", "''", "sdist",
+                    "--dist-dir", "%s" % self.tmpdir)
+        tarball = os.listdir(self.tmpdir)[0]
+        self.tarball = os.path.join(self.tmpdir, tarball)
+        import urllib2
+        urllib2.urlopen = self.urlopen
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+        os.chdir(self.cwd)
+        sys.path = copy.copy(self.old_sys_path)
+
+    def test_build_egg(self):
+        # making it an egg
+        egg = _build_egg(self.tarball, self.tmpdir)
+
+        # now trying to import it
+        sys.path[0] = egg
+        import setuptools
+        self.assertTrue(setuptools.__file__.startswith(egg))
+
+    def test_do_download(self):
+        tmpdir = tempfile.mkdtemp()
+        _do_download(DEFAULT_VERSION, DEFAULT_URL, tmpdir, 1)
+        import setuptools
+        self.assertTrue(setuptools.bootstrap_install_from.startswith(tmpdir))
+
+    def test_install(self):
+        def _faked(*args):
+            return True
+        distribute_setup._python_cmd = _faked
+        _install(self.tarball)
+
+    def test_use_setuptools(self):
+        self.assertEqual(use_setuptools(), None)
+
+        # make sure fake_setuptools is not called by default
+        import pkg_resources
+        del pkg_resources._distribute
+        def fake_setuptools(*args):
+            raise AssertionError
+
+        pkg_resources._fake_setuptools = fake_setuptools
+        use_setuptools()
+
+if __name__ == '__main__':
+    unittest.main()