From 343b9a0ba4f72c4da641f6ba74dfbe643b70807a Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Fri, 12 Jun 2020 17:21:01 +0200 Subject: [PATCH 1/9] Start with a copy from datalad@afa682550ee742d69853f69165c0f37c5f4b5f05 --- __init__.py | 13 ++ formatters.py | 314 ++++++++++++++++++++++++++++++ setup.py | 514 ++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 841 insertions(+) create mode 100644 __init__.py create mode 100644 formatters.py create mode 100644 setup.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 00000000..c91633ab --- /dev/null +++ b/__init__.py @@ -0,0 +1,13 @@ +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the DataLad package for the +# copyright and license terms. +# +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +"""Python package for functionality needed at package 'build' time by DataLad and its extensions + +__init__ here should be really minimalistic, not import submodules by default +and submodules should also not require heavy dependencies. +""" + +__version__ = '0.1' diff --git a/formatters.py b/formatters.py new file mode 100644 index 00000000..015f62a4 --- /dev/null +++ b/formatters.py @@ -0,0 +1,314 @@ +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the DataLad package for the +# copyright and license terms. +# +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## + +import argparse +import datetime +import re + + +class ManPageFormatter(argparse.HelpFormatter): + # This code was originally distributed + # under the same License of Python + # Copyright (c) 2014 Oz Nahum Tiram + def __init__(self, + prog, + indent_increment=2, + max_help_position=4, + width=1000000, + section=1, + ext_sections=None, + authors=None, + version=None + ): + + super(ManPageFormatter, self).__init__( + prog, + indent_increment=indent_increment, + max_help_position=max_help_position, + width=width) + + self._prog = prog + self._section = 1 + self._today = datetime.date.today().strftime('%Y\\-%m\\-%d') + self._ext_sections = ext_sections + self._version = version + + def _get_formatter(self, **kwargs): + return self.formatter_class(prog=self.prog, **kwargs) + + def _markup(self, txt): + return txt.replace('-', '\\-') + + def _underline(self, string): + return "\\fI\\s-1" + string + "\\s0\\fR" + + def _bold(self, string): + if not string.strip().startswith('\\fB'): + string = '\\fB' + string + if not string.strip().endswith('\\fR'): + string = string + '\\fR' + return string + + def _mk_synopsis(self, parser): + self.add_usage(parser.usage, parser._actions, + parser._mutually_exclusive_groups, prefix='') + usage = self._format_usage(None, parser._actions, + parser._mutually_exclusive_groups, '') + # replace too long list of commands with a single placeholder + usage = re.sub(r'{[^]]*?create,.*?}', ' COMMAND ', usage, flags=re.MULTILINE) + # take care of proper wrapping + usage = re.sub(r'\[([-a-zA-Z0-9]*)\s([a-zA-Z0-9{}|_]*)\]', r'[\1\~\2]', usage) + + usage = usage.replace('%s ' % self._prog, '') + usage = '.SH SYNOPSIS\n.nh\n.HP\n\\fB%s\\fR %s\n.hy\n' % (self._markup(self._prog), + usage) + return usage + + def _mk_title(self, prog): + name_version = "{0} {1}".format(prog, self._version) + return '.TH "{0}" "{1}" "{2}" "{3}"\n'.format( + prog, self._section, self._today, name_version) + + def _mk_name(self, prog, desc): + """ + 
this method is in consitent with others ... it relies on + distribution + """ + desc = desc.splitlines()[0] if desc else 'it is in the name' + # ensure starting lower case + desc = desc[0].lower() + desc[1:] + return '.SH NAME\n%s \\- %s\n' % (self._bold(prog), desc) + + def _mk_description(self, parser): + desc = parser.description + desc = '\n'.join(desc.splitlines()[1:]) + if not desc: + return '' + desc = desc.replace('\n\n', '\n.PP\n') + # sub-section headings + desc = re.sub(r'^\*(.*)\*$', r'.SS \1', desc, flags=re.MULTILINE) + # italic commands + desc = re.sub(r'^ ([-a-z]*)$', r'.TP\n\\fI\1\\fR', desc, flags=re.MULTILINE) + # deindent body text, leave to troff viewer + desc = re.sub(r'^ (\S.*)\n', '\\1\n', desc, flags=re.MULTILINE) + # format NOTEs as indented paragraphs + desc = re.sub(r'^NOTE\n', '.TP\nNOTE\n', desc, flags=re.MULTILINE) + # deindent indented paragraphs after heading setup + desc = re.sub(r'^ (.*)$', '\\1', desc, flags=re.MULTILINE) + + return '.SH DESCRIPTION\n%s\n' % self._markup(desc) + + def _mk_footer(self, sections): + if not hasattr(sections, '__iter__'): + return '' + + footer = [] + for section, value in sections.items(): + part = ".SH {}\n {}".format(section.upper(), value) + footer.append(part) + + return '\n'.join(footer) + + def format_man_page(self, parser): + page = [] + page.append(self._mk_title(self._prog)) + page.append(self._mk_name(self._prog, parser.description)) + page.append(self._mk_synopsis(parser)) + page.append(self._mk_description(parser)) + page.append(self._mk_options(parser)) + page.append(self._mk_footer(self._ext_sections)) + + return ''.join(page) + + def _mk_options(self, parser): + + formatter = parser._get_formatter() + + # positionals, optionals and user-defined groups + for action_group in parser._action_groups: + formatter.start_section(None) + formatter.add_text(None) + formatter.add_arguments(action_group._group_actions) + formatter.end_section() + + # epilog + formatter.add_text(parser.epilog) + + # determine help from format above + help = formatter.format_help() + # add spaces after comma delimiters for easier reformatting + help = re.sub(r'([a-z]),([a-z])', '\\1, \\2', help) + # get proper indentation for argument items + help = re.sub(r'^ (\S.*)\n', '.TP\n\\1\n', help, flags=re.MULTILINE) + # deindent body text, leave to troff viewer + help = re.sub(r'^ (\S.*)\n', '\\1\n', help, flags=re.MULTILINE) + return '.SH OPTIONS\n' + help + + def _format_action_invocation(self, action, doubledash='--'): + if not action.option_strings: + metavar, = self._metavar_formatter(action, action.dest)(1) + return metavar + + else: + parts = [] + + # if the Optional doesn't take a value, format is: + # -s, --long + if action.nargs == 0: + parts.extend([self._bold(action_str) for action_str in + action.option_strings]) + + # if the Optional takes a value, format is: + # -s ARGS, --long ARGS + else: + default = self._underline(action.dest.upper()) + args_string = self._format_args(action, default) + for option_string in action.option_strings: + parts.append('%s %s' % (self._bold(option_string), + args_string)) + + return ', '.join(p.replace('--', doubledash) for p in parts) + + +class RSTManPageFormatter(ManPageFormatter): + def _get_formatter(self, **kwargs): + return self.formatter_class(prog=self.prog, **kwargs) + + def _markup(self, txt): + # put general tune-ups here + return txt + + def _underline(self, string): + return "*{0}*".format(string) + + def _bold(self, string): + return "**{0}**".format(string) + + def _mk_synopsis(self, 
parser): + self.add_usage(parser.usage, parser._actions, + parser._mutually_exclusive_groups, prefix='') + usage = self._format_usage(None, parser._actions, + parser._mutually_exclusive_groups, '') + + usage = usage.replace('%s ' % self._prog, '') + usage = 'Synopsis\n--------\n::\n\n %s %s\n' \ + % (self._markup(self._prog), usage) + return usage + + def _mk_title(self, prog): + # and an easy to use reference point + title = ".. _man_%s:\n\n" % prog.replace(' ', '-') + title += "{0}".format(prog) + title += '\n{0}\n\n'.format('=' * len(prog)) + return title + + def _mk_name(self, prog, desc): + return '' + + def _mk_description(self, parser): + desc = parser.description + if not desc: + return '' + return 'Description\n-----------\n%s\n' % self._markup(desc) + + def _mk_footer(self, sections): + if not hasattr(sections, '__iter__'): + return '' + + footer = [] + for section, value in sections.items(): + part = "\n{0}\n{1}\n{2}\n".format( + section, + '-' * len(section), + value) + footer.append(part) + + return '\n'.join(footer) + + def _mk_options(self, parser): + + # this non-obvious maneuver is really necessary! + formatter = self.__class__(self._prog) + + # positionals, optionals and user-defined groups + for action_group in parser._action_groups: + formatter.start_section(None) + formatter.add_text(None) + formatter.add_arguments(action_group._group_actions) + formatter.end_section() + + # epilog + formatter.add_text(parser.epilog) + + # determine help from format above + option_sec = formatter.format_help() + + return '\n\nOptions\n-------\n{0}'.format(option_sec) + + def _format_action(self, action): + # determine the required width and the entry label + action_header = self._format_action_invocation(action, doubledash='-\\\\-') + + if action.help: + help_text = self._expand_help(action) + help_lines = self._split_lines(help_text, 80) + help = ' '.join(help_lines) + else: + help = '' + + # return a single string + return '{0}\n{1}\n{2}\n\n'.format( + action_header, + + '~' * len(action_header), + help) + + +def cmdline_example_to_rst(src, out=None, ref=None): + if out is None: + from io import StringIO + out = StringIO() + + # place header + out.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') + if ref: + # place cross-ref target + out.write('.. {0}:\n\n'.format(ref)) + + # parser status vars + inexample = False + incodeblock = False + + for line in src: + if line.startswith('#% EXAMPLE START'): + inexample = True + incodeblock = False + continue + if not inexample: + continue + if line.startswith('#% EXAMPLE END'): + break + if not inexample: + continue + if line.startswith('#%'): + incodeblock = not incodeblock + if incodeblock: + out.write('\n.. code-block:: sh\n\n') + continue + if not incodeblock and line.startswith('#'): + out.write(line[(min(2, len(line) - 1)):]) + continue + if incodeblock: + if not line.rstrip().endswith('#% SKIP'): + out.write(' %s' % line) + continue + if not len(line.strip()): + continue + else: + raise RuntimeError("this should not happen") + + return out diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..b58d0466 --- /dev/null +++ b/setup.py @@ -0,0 +1,514 @@ +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the DataLad package for the +# copyright and license terms. 
+# +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## + + +import datetime +import os +import platform +import setuptools +import sys + + +from distutils.core import Command +from distutils.errors import DistutilsOptionError +from distutils.version import LooseVersion +from genericpath import exists +from os import linesep, makedirs +from os.path import dirname, join as opj, sep as pathsep, splitext +from setuptools import findall, find_packages, setup + +from . import formatters as fmt + + +def _path_rel2file(*p): + # dirname instead of joining with pardir so it works if + # datalad_build_support/ is just symlinked into some extension + # while developing + return opj(dirname(dirname(__file__)), *p) + + +def get_version(name): + """Load version from version.py without entailing any imports + + Parameters + ---------- + name: str + Name of the folder (package) where from to read version.py + """ + # This might entail lots of imports which might not yet be available + # so let's do ad-hoc parsing of the version.py + with open(_path_rel2file(name, 'version.py')) as f: + version_lines = list(filter(lambda x: x.startswith('__version__'), f)) + assert (len(version_lines) == 1) + return version_lines[0].split('=')[1].strip(" '\"\t\n") + + +class BuildManPage(Command): + # The BuildManPage code was originally distributed + # under the same License of Python + # Copyright (c) 2014 Oz Nahum Tiram + + description = 'Generate man page from an ArgumentParser instance.' + + user_options = [ + ('manpath=', None, 'output path for manpages'), + ('rstpath=', None, 'output path for RST files'), + ('parser=', None, 'module path to an ArgumentParser instance' + '(e.g. mymod:func, where func is a method or function which return' + 'a dict with one or more arparse.ArgumentParser instances.'), + ] + + def initialize_options(self): + self.manpath = opj('build', 'man') + self.rstpath = opj('docs', 'source', 'generated', 'man') + self.parser = 'datalad.cmdline.main:setup_parser' + + def finalize_options(self): + if self.manpath is None: + raise DistutilsOptionError('\'manpath\' option is required') + if self.rstpath is None: + raise DistutilsOptionError('\'rstpath\' option is required') + if self.parser is None: + raise DistutilsOptionError('\'parser\' option is required') + self.manpath = _path_rel2file(self.manpath) + self.rstpath = _path_rel2file(self.rstpath) + mod_name, func_name = self.parser.split(':') + fromlist = mod_name.split('.') + try: + mod = __import__(mod_name, fromlist=fromlist) + self._parser = getattr(mod, func_name)( + ['datalad'], + formatter_class=fmt.ManPageFormatter, + return_subparsers=True, + help_ignore_extensions=True) + + except ImportError as err: + raise err + + self.announce('Writing man page(s) to %s' % self.manpath) + self._today = datetime.date.today() + + @classmethod + def handle_module(cls, mod_name, **kwargs): + """Module specific handling. + + This particular one does + 1. Memorize (at class level) the module name of interest here + 2. Check if 'datalad.extensions' are specified for the module, + and then analyzes them to obtain command names it provides + + If cmdline commands are found, its entries are to be used instead of + the ones in datalad's _parser. 
+ + Parameters + ---------- + **kwargs: + all the kwargs which might be provided to setuptools.setup + """ + cls.mod_name = mod_name + + exts = kwargs.get('entry_points', {}).get('datalad.extensions', []) + for ext in exts: + assert '=' in ext # should be label=module:obj + ext_label, mod_obj = ext.split('=', 1) + assert ':' in mod_obj # should be module:obj + mod, obj = mod_obj.split(':', 1) + assert mod_name == mod # AFAIK should be identical + + mod = __import__(mod_name) + if hasattr(mod, obj): + command_suite = getattr(mod, obj) + assert len(command_suite) == 2 # as far as I see it + if not hasattr(cls, 'cmdline_names'): + cls.cmdline_names = [] + cls.cmdline_names += [ + cmd + for _, _, cmd, _ in command_suite[1] + ] + + def run(self): + + dist = self.distribution + #homepage = dist.get_url() + #appname = self._parser.prog + appname = 'datalad' + + sections = { + 'Authors': """{0} is developed by {1} <{2}>.""".format( + appname, dist.get_author(), dist.get_author_email()), + } + + for cls, opath, ext in ((fmt.ManPageFormatter, self.manpath, '1'), + (fmt.RSTManPageFormatter, self.rstpath, 'rst')): + if not os.path.exists(opath): + os.makedirs(opath) + for cmdname in getattr(self, 'cmdline_names', list(self._parser)): + p = self._parser[cmdname] + cmdname = "{0}{1}".format( + 'datalad ' if cmdname != 'datalad' else '', + cmdname) + format = cls( + cmdname, + ext_sections=sections, + version=get_version(getattr(self, 'mod_name', appname))) + formatted = format.format_man_page(p) + with open(opj(opath, '{0}.{1}'.format( + cmdname.replace(' ', '-'), + ext)), + 'w') as f: + f.write(formatted) + + +class BuildRSTExamplesFromScripts(Command): + description = 'Generate RST variants of example shell scripts.' + + user_options = [ + ('expath=', None, 'path to look for example scripts'), + ('rstpath=', None, 'output path for RST files'), + ] + + def initialize_options(self): + self.expath = opj('docs', 'examples') + self.rstpath = opj('docs', 'source', 'generated', 'examples') + + def finalize_options(self): + if self.expath is None: + raise DistutilsOptionError('\'expath\' option is required') + if self.rstpath is None: + raise DistutilsOptionError('\'rstpath\' option is required') + self.expath = _path_rel2file(self.expath) + self.rstpath = _path_rel2file(self.rstpath) + self.announce('Converting example scripts') + + def run(self): + opath = self.rstpath + if not os.path.exists(opath): + os.makedirs(opath) + + from glob import glob + for example in glob(opj(self.expath, '*.sh')): + exname = os.path.basename(example)[:-3] + with open(opj(opath, '{0}.rst'.format(exname)), 'w') as out: + fmt.cmdline_example_to_rst( + open(example), + out=out, + ref='_example_{0}'.format(exname)) + + +class BuildConfigInfo(Command): + description = 'Generate RST documentation for all config items.' 
+ + user_options = [ + ('rstpath=', None, 'output path for RST file'), + ] + + def initialize_options(self): + self.rstpath = opj('docs', 'source', 'generated', 'cfginfo') + + def finalize_options(self): + if self.rstpath is None: + raise DistutilsOptionError('\'rstpath\' option is required') + self.rstpath = _path_rel2file(self.rstpath) + self.announce('Generating configuration documentation') + + def run(self): + opath = self.rstpath + if not os.path.exists(opath): + os.makedirs(opath) + + from datalad.interface.common_cfg import definitions as cfgdefs + from datalad.dochelpers import _indent + + categories = { + 'global': {}, + 'local': {}, + 'dataset': {}, + 'misc': {} + } + for term, v in cfgdefs.items(): + categories[v.get('destination', 'misc')][term] = v + + for cat in categories: + with open(opj(opath, '{}.rst.in'.format(cat)), 'w') as rst: + rst.write('.. glossary::\n') + for term, v in sorted(categories[cat].items(), key=lambda x: x[0]): + rst.write(_indent(term, '\n ')) + qtype, docs = v.get('ui', (None, {})) + desc_tmpl = '\n' + if 'title' in docs: + desc_tmpl += '{title}:\n' + if 'text' in docs: + desc_tmpl += '{text}\n' + if 'default' in v: + default = v['default'] + if hasattr(default, 'replace'): + # protect against leaking specific home dirs + v['default'] = default.replace(os.path.expanduser('~'), '~') + desc_tmpl += 'Default: {default}\n' + if 'type' in v: + type_ = v['type'] + if hasattr(type_, 'long_description'): + type_ = type_.long_description() + else: + type_ = type_.__name__ + desc_tmpl += '\n[{type}]\n' + v['type'] = type_ + if desc_tmpl == '\n': + # we need something to avoid joining terms + desc_tmpl += 'undocumented\n' + v.update(docs) + rst.write(_indent(desc_tmpl.format(**v), ' ')) + + +class BuildSchema(Command): + description = 'Generate DataLad JSON-LD schema.' + + user_options = [ + ('path=', None, 'output path for schema file'), + ] + + def initialize_options(self): + self.path = opj('docs', 'source', '_extras') + + def finalize_options(self): + if self.path is None: + raise DistutilsOptionError('\'path\' option is required') + self.path = _path_rel2file(self.path) + self.announce('Generating JSON-LD schema file') + + def run(self): + from datalad.metadata.definitions import common_defs + from datalad.metadata.definitions import version as schema_version + import json + import shutil + + def _mk_fname(label, version): + return '{}{}{}.json'.format( + label, + '_v' if version else '', + version) + + def _defs2context(defs, context_label, vocab_version, main_version=schema_version): + opath = opj( + self.path, + _mk_fname(context_label, vocab_version)) + odir = dirname(opath) + if not os.path.exists(odir): + os.makedirs(odir) + + # to become DataLad's own JSON-LD context + context = {} + schema = {"@context": context} + if context_label != 'schema': + schema['@vocab'] = 'http://docs.datalad.org/{}'.format( + _mk_fname('schema', main_version)) + for key, val in defs.items(): + # git-annex doesn't allow ':', but in JSON-LD we need it for + # namespace separation -- let's make '.' 
in git-annex mean + # ':' in JSON-LD + key = key.replace('.', ':') + definition = val['def'] + if definition.startswith('http://') or definition.startswith('https://'): + # this is not a URL, hence an @id definitions that points + # to another schema + context[key] = definition + continue + # the rest are compound definitions + props = {'@id': definition} + if 'unit' in val: + props['unit'] = val['unit'] + if 'descr' in val: + props['description'] = val['descr'] + context[key] = props + + with open(opath, 'w') as fp: + json.dump( + schema, + fp, + ensure_ascii=True, + indent=1, + separators=(', ', ': '), + sort_keys=True) + print('schema written to {}'.format(opath)) + + # core vocabulary + _defs2context(common_defs, 'schema', schema_version) + + # present the same/latest version also as the default + shutil.copy( + opj(self.path, _mk_fname('schema', schema_version)), + opj(self.path, 'schema.json')) + + +def setup_entry_points(entry_points): + """Sneaky monkey patching could be fixed only via even sneakier monkey patching + + It will never break, I promise! + """ + + def get_script_content(script_name, shebang="#!/usr/bin/env python"): + return linesep.join([ + shebang, + "#", + "# Custom simplistic runner for DataLad. Assumes datalad module", + "# being available. Generated by monkey patching monkey patched", + "# setuptools.", + "#", + "from %s import main" % entry_points[script_name], + "main()", + ""]).encode() + + def patch_write_script(mod): + """Patches write_script of the module with our shim to provide + lightweight invocation script + """ + + orig_meth = getattr(mod, 'write_script') + + def _provide_lean_script_contents( + self, script_name, contents, mode="t", *ignored): + # could be a script from another module -- let it be as is + if script_name in entry_points: + # keep shebang + contents = get_script_content( + script_name, + contents.splitlines()[0].decode()) + return orig_meth(self, script_name, contents, mode=mode) + + setattr(mod, 'write_script', _provide_lean_script_contents) + + # We still need this one so that setuptools known about the scripts + # So we generate some bogus ones, and provide a list of them ;) + # pre-generate paths so we could give them to setuptools + scripts_build_dir = opj('build', 'scripts_generated') + scripts = [opj(scripts_build_dir, x) for x in entry_points] + + if 'clean' not in sys.argv: + if not exists(scripts_build_dir): + makedirs(scripts_build_dir) + for s, mod in entry_points.items(): + with open(opj(scripts_build_dir, s), 'wb') as f: + f.write(get_script_content(s)) + + platform_system = platform.system().lower() + setup_kwargs = {} + + if platform_system == 'windows': + # TODO: investigate https://github.com/matthew-brett/myscripter, + # nibabel/nixext approach to support similar setup on Windows + setup_kwargs['entry_points'] = { + 'console_scripts': ['%s=%s:main' % i for i in entry_points.items()] + } + else: + # Damn you sharktopus! + from setuptools.command.install_scripts import \ + install_scripts as stinstall_scripts + from setuptools.command.easy_install import easy_install + + patch_write_script(stinstall_scripts) + patch_write_script(easy_install) + + setup_kwargs['scripts'] = scripts + + return setup_kwargs + + +def get_long_description_from_README(): + """Read README.md, convert to .rst using pypandoc + + If pypandoc is not available or fails - just output original .md. 
+ + Returns + ------- + dict + with keys long_description and possibly long_description_content_type + for newer setuptools which support uploading of markdown as is. + """ + # PyPI used to not render markdown. Workaround for a sane appearance + # https://github.com/pypa/pypi-legacy/issues/148#issuecomment-227757822 + # is still in place for older setuptools + + README = opj(_path_rel2file('README.md')) + + ret = {} + if LooseVersion(setuptools.__version__) >= '38.6.0': + # check than this + ret['long_description'] = open(README).read() + ret['long_description_content_type'] = 'text/markdown' + return ret + + # Convert or fall-back + try: + import pypandoc + return {'long_description': pypandoc.convert(README, 'rst')} + except (ImportError, OSError) as exc: + # attempting to install pandoc via brew on OSX currently hangs and + # pypandoc imports but throws OSError demanding pandoc + print( + "WARNING: pypandoc failed to import or thrown an error while " + "converting" + " README.md to RST: %r .md version will be used as is" % exc + ) + return {'long_description': open(README).read()} + + +def findsome(subdir, extensions): + """Find files under subdir having specified extensions + + Leading directory (datalad) gets stripped + """ + return [ + f.split(pathsep, 1)[1] for f in findall(opj('datalad', subdir)) + if splitext(f)[-1].lstrip('.') in extensions + ] + + +def datalad_setup(name, **kwargs): + """A helper for a typical invocation of setuptools.setup. + + If not provided in kwargs, following fields will be autoset to the defaults + or obtained from the present on the file system files: + + - author + - author_email + - packages -- all found packages which start with `name` + - long_description -- converted to .rst using pypandoc README.md + - version -- parsed `__version__` within `name/version.py` + + Parameters + ---------- + name: str + Name of the Python package + **kwargs: + The rest of the keyword arguments passed to setuptools.setup as is + """ + # Simple defaults + for k, v in { + 'author': "The DataLad Team and Contributors", + 'author_email': "team@datalad.org" + }.items(): + if kwargs.get(k) is None: + kwargs[k] = v + + # More complex, requiring some function call + + # Only recentish versions of find_packages support include + # packages = find_packages('.', include=['datalad*']) + # so we will filter manually for maximal compatibility + if kwargs.get('packages') is None: + kwargs['packages'] = [pkg for pkg in find_packages('.') if pkg.startswith(name)] + if kwargs.get('long_description') is None: + kwargs.update(get_long_description_from_README()) + if kwargs.get('version') is None: + kwargs['version'] = get_version(name) + + cmdclass = kwargs.get('cmdclass', {}) + # Check if command needs some module specific handling + for v in cmdclass.values(): + if hasattr(v, 'handle_module'): + getattr(v, 'handle_module')(name, **kwargs) + return setup(name=name, **kwargs) \ No newline at end of file From ba575108283843f6e9f63c1852c69cc29df08ffd Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Fri, 12 Jun 2020 17:35:46 +0200 Subject: [PATCH 2/9] Enable building manpages for extensions With the ability to point to a specific command suite. The rest is just about making datalad core not break. 
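For illustration (module and command names below are hypothetical), an
extension that registers this command class in its setup.py, e.g.

    setup(..., cmdclass={'build_manpage': BuildManPage}, ...)

could then restrict the manpage build to its own commands via the new
option added here:

    python setup.py build_manpage --cmdsuite datalad_myext:command_suite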
--- setup.py | 41 ++++++++++++++++++++++++++++++----------- 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/setup.py b/setup.py index b58d0466..3add25fa 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ from distutils.version import LooseVersion from genericpath import exists from os import linesep, makedirs -from os.path import dirname, join as opj, sep as pathsep, splitext +from os.path import dirname, join as opj, sep as pathsep, splitext, isabs from setuptools import findall, find_packages, setup from . import formatters as fmt @@ -28,7 +28,12 @@ def _path_rel2file(*p): # dirname instead of joining with pardir so it works if # datalad_build_support/ is just symlinked into some extension # while developing - return opj(dirname(dirname(__file__)), *p) + if isinstance(p, str) and isabs(p): + # do not mess with absolute paths + return p + else: + # relative means relative to the datalad package + return opj(dirname(dirname(__file__)), *p) def get_version(name): @@ -55,17 +60,25 @@ class BuildManPage(Command): description = 'Generate man page from an ArgumentParser instance.' user_options = [ - ('manpath=', None, 'output path for manpages'), - ('rstpath=', None, 'output path for RST files'), + ('manpath=', None, + 'output path for manpages (relative paths are relative to the ' + 'datalad package)'), + ('rstpath=', None, + 'output path for RST files (relative paths are relative to the ' + 'datalad package)'), ('parser=', None, 'module path to an ArgumentParser instance' '(e.g. mymod:func, where func is a method or function which return' 'a dict with one or more arparse.ArgumentParser instances.'), + ('cmdsuite=', None, 'module path to an extension command suite ' + '(e.g. mymod:command_suite) to limit the build to the contained ' + 'commands.'), ] def initialize_options(self): self.manpath = opj('build', 'man') self.rstpath = opj('docs', 'source', 'generated', 'man') self.parser = 'datalad.cmdline.main:setup_parser' + self.cmdsuite = None def finalize_options(self): if self.manpath is None: @@ -84,10 +97,18 @@ def finalize_options(self): ['datalad'], formatter_class=fmt.ManPageFormatter, return_subparsers=True, - help_ignore_extensions=True) + # ignore extensions only for the main package to avoid pollution + # with all extension commands that happen to be installed + help_ignore_extensions=self.distribution.get_name() == 'datalad') except ImportError as err: raise err + if self.cmdsuite: + mod_name, suite_name = self.cmdsuite.split(':') + mod = __import__(mod_name, fromlist=mod_name.split('.')) + suite = getattr(mod, suite_name) + self.cmdlist = [c[2] if len(c) > 2 else c[1].replace('_', '-') + for c in suite[1]] self.announce('Writing man page(s) to %s' % self.manpath) self._today = datetime.date.today() @@ -147,6 +168,8 @@ def run(self): if not os.path.exists(opath): os.makedirs(opath) for cmdname in getattr(self, 'cmdline_names', list(self._parser)): + if hasattr(self, 'cmdlist') and cmdname not in self.cmdlist: + continue p = self._parser[cmdname] cmdname = "{0}{1}".format( 'datalad ' if cmdname != 'datalad' else '', @@ -495,12 +518,8 @@ def datalad_setup(name, **kwargs): kwargs[k] = v # More complex, requiring some function call - - # Only recentish versions of find_packages support include - # packages = find_packages('.', include=['datalad*']) - # so we will filter manually for maximal compatibility if kwargs.get('packages') is None: - kwargs['packages'] = [pkg for pkg in find_packages('.') if pkg.startswith(name)] + kwargs['packages'] = find_packages('.') if 
kwargs.get('long_description') is None: kwargs.update(get_long_description_from_README()) if kwargs.get('version') is None: @@ -511,4 +530,4 @@ def datalad_setup(name, **kwargs): for v in cmdclass.values(): if hasattr(v, 'handle_module'): getattr(v, 'handle_module')(name, **kwargs) - return setup(name=name, **kwargs) \ No newline at end of file + return setup(name=name, **kwargs) From fd99b4eece55f97514387b31787d15b393a669f9 Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Mon, 15 Jun 2020 08:15:09 +0200 Subject: [PATCH 3/9] RF: Remove helpers that are not needed for extensions DataLad core can add its own ones again, but it is not worth imposing the code on all extensions. --- setup.py | 284 +------------------------------------------------------ 1 file changed, 3 insertions(+), 281 deletions(-) diff --git a/setup.py b/setup.py index 3add25fa..c693d7fe 100644 --- a/setup.py +++ b/setup.py @@ -8,10 +8,6 @@ import datetime import os -import platform -import setuptools -import sys - from distutils.core import Command from distutils.errors import DistutilsOptionError @@ -24,32 +20,9 @@ from . import formatters as fmt -def _path_rel2file(*p): - # dirname instead of joining with pardir so it works if - # datalad_build_support/ is just symlinked into some extension - # while developing - if isinstance(p, str) and isabs(p): - # do not mess with absolute paths - return p - else: - # relative means relative to the datalad package - return opj(dirname(dirname(__file__)), *p) - - -def get_version(name): - """Load version from version.py without entailing any imports +import versioneer - Parameters - ---------- - name: str - Name of the folder (package) where from to read version.py - """ - # This might entail lots of imports which might not yet be available - # so let's do ad-hoc parsing of the version.py - with open(_path_rel2file(name, 'version.py')) as f: - version_lines = list(filter(lambda x: x.startswith('__version__'), f)) - assert (len(version_lines) == 1) - return version_lines[0].split('=')[1].strip(" '\"\t\n") +from . 
import formatters as fmt class BuildManPage(Command): @@ -87,8 +60,6 @@ def finalize_options(self): raise DistutilsOptionError('\'rstpath\' option is required') if self.parser is None: raise DistutilsOptionError('\'parser\' option is required') - self.manpath = _path_rel2file(self.manpath) - self.rstpath = _path_rel2file(self.rstpath) mod_name, func_name = self.parser.split(':') fromlist = mod_name.split('.') try: @@ -177,7 +148,7 @@ def run(self): format = cls( cmdname, ext_sections=sections, - version=get_version(getattr(self, 'mod_name', appname))) + version=versioneer.get_version()) formatted = format.format_man_page(p) with open(opj(opath, '{0}.{1}'.format( cmdname.replace(' ', '-'), @@ -203,8 +174,6 @@ def finalize_options(self): raise DistutilsOptionError('\'expath\' option is required') if self.rstpath is None: raise DistutilsOptionError('\'rstpath\' option is required') - self.expath = _path_rel2file(self.expath) - self.rstpath = _path_rel2file(self.rstpath) self.announce('Converting example scripts') def run(self): @@ -235,7 +204,6 @@ def initialize_options(self): def finalize_options(self): if self.rstpath is None: raise DistutilsOptionError('\'rstpath\' option is required') - self.rstpath = _path_rel2file(self.rstpath) self.announce('Generating configuration documentation') def run(self): @@ -285,249 +253,3 @@ def run(self): desc_tmpl += 'undocumented\n' v.update(docs) rst.write(_indent(desc_tmpl.format(**v), ' ')) - - -class BuildSchema(Command): - description = 'Generate DataLad JSON-LD schema.' - - user_options = [ - ('path=', None, 'output path for schema file'), - ] - - def initialize_options(self): - self.path = opj('docs', 'source', '_extras') - - def finalize_options(self): - if self.path is None: - raise DistutilsOptionError('\'path\' option is required') - self.path = _path_rel2file(self.path) - self.announce('Generating JSON-LD schema file') - - def run(self): - from datalad.metadata.definitions import common_defs - from datalad.metadata.definitions import version as schema_version - import json - import shutil - - def _mk_fname(label, version): - return '{}{}{}.json'.format( - label, - '_v' if version else '', - version) - - def _defs2context(defs, context_label, vocab_version, main_version=schema_version): - opath = opj( - self.path, - _mk_fname(context_label, vocab_version)) - odir = dirname(opath) - if not os.path.exists(odir): - os.makedirs(odir) - - # to become DataLad's own JSON-LD context - context = {} - schema = {"@context": context} - if context_label != 'schema': - schema['@vocab'] = 'http://docs.datalad.org/{}'.format( - _mk_fname('schema', main_version)) - for key, val in defs.items(): - # git-annex doesn't allow ':', but in JSON-LD we need it for - # namespace separation -- let's make '.' 
in git-annex mean - # ':' in JSON-LD - key = key.replace('.', ':') - definition = val['def'] - if definition.startswith('http://') or definition.startswith('https://'): - # this is not a URL, hence an @id definitions that points - # to another schema - context[key] = definition - continue - # the rest are compound definitions - props = {'@id': definition} - if 'unit' in val: - props['unit'] = val['unit'] - if 'descr' in val: - props['description'] = val['descr'] - context[key] = props - - with open(opath, 'w') as fp: - json.dump( - schema, - fp, - ensure_ascii=True, - indent=1, - separators=(', ', ': '), - sort_keys=True) - print('schema written to {}'.format(opath)) - - # core vocabulary - _defs2context(common_defs, 'schema', schema_version) - - # present the same/latest version also as the default - shutil.copy( - opj(self.path, _mk_fname('schema', schema_version)), - opj(self.path, 'schema.json')) - - -def setup_entry_points(entry_points): - """Sneaky monkey patching could be fixed only via even sneakier monkey patching - - It will never break, I promise! - """ - - def get_script_content(script_name, shebang="#!/usr/bin/env python"): - return linesep.join([ - shebang, - "#", - "# Custom simplistic runner for DataLad. Assumes datalad module", - "# being available. Generated by monkey patching monkey patched", - "# setuptools.", - "#", - "from %s import main" % entry_points[script_name], - "main()", - ""]).encode() - - def patch_write_script(mod): - """Patches write_script of the module with our shim to provide - lightweight invocation script - """ - - orig_meth = getattr(mod, 'write_script') - - def _provide_lean_script_contents( - self, script_name, contents, mode="t", *ignored): - # could be a script from another module -- let it be as is - if script_name in entry_points: - # keep shebang - contents = get_script_content( - script_name, - contents.splitlines()[0].decode()) - return orig_meth(self, script_name, contents, mode=mode) - - setattr(mod, 'write_script', _provide_lean_script_contents) - - # We still need this one so that setuptools known about the scripts - # So we generate some bogus ones, and provide a list of them ;) - # pre-generate paths so we could give them to setuptools - scripts_build_dir = opj('build', 'scripts_generated') - scripts = [opj(scripts_build_dir, x) for x in entry_points] - - if 'clean' not in sys.argv: - if not exists(scripts_build_dir): - makedirs(scripts_build_dir) - for s, mod in entry_points.items(): - with open(opj(scripts_build_dir, s), 'wb') as f: - f.write(get_script_content(s)) - - platform_system = platform.system().lower() - setup_kwargs = {} - - if platform_system == 'windows': - # TODO: investigate https://github.com/matthew-brett/myscripter, - # nibabel/nixext approach to support similar setup on Windows - setup_kwargs['entry_points'] = { - 'console_scripts': ['%s=%s:main' % i for i in entry_points.items()] - } - else: - # Damn you sharktopus! - from setuptools.command.install_scripts import \ - install_scripts as stinstall_scripts - from setuptools.command.easy_install import easy_install - - patch_write_script(stinstall_scripts) - patch_write_script(easy_install) - - setup_kwargs['scripts'] = scripts - - return setup_kwargs - - -def get_long_description_from_README(): - """Read README.md, convert to .rst using pypandoc - - If pypandoc is not available or fails - just output original .md. 
- - Returns - ------- - dict - with keys long_description and possibly long_description_content_type - for newer setuptools which support uploading of markdown as is. - """ - # PyPI used to not render markdown. Workaround for a sane appearance - # https://github.com/pypa/pypi-legacy/issues/148#issuecomment-227757822 - # is still in place for older setuptools - - README = opj(_path_rel2file('README.md')) - - ret = {} - if LooseVersion(setuptools.__version__) >= '38.6.0': - # check than this - ret['long_description'] = open(README).read() - ret['long_description_content_type'] = 'text/markdown' - return ret - - # Convert or fall-back - try: - import pypandoc - return {'long_description': pypandoc.convert(README, 'rst')} - except (ImportError, OSError) as exc: - # attempting to install pandoc via brew on OSX currently hangs and - # pypandoc imports but throws OSError demanding pandoc - print( - "WARNING: pypandoc failed to import or thrown an error while " - "converting" - " README.md to RST: %r .md version will be used as is" % exc - ) - return {'long_description': open(README).read()} - - -def findsome(subdir, extensions): - """Find files under subdir having specified extensions - - Leading directory (datalad) gets stripped - """ - return [ - f.split(pathsep, 1)[1] for f in findall(opj('datalad', subdir)) - if splitext(f)[-1].lstrip('.') in extensions - ] - - -def datalad_setup(name, **kwargs): - """A helper for a typical invocation of setuptools.setup. - - If not provided in kwargs, following fields will be autoset to the defaults - or obtained from the present on the file system files: - - - author - - author_email - - packages -- all found packages which start with `name` - - long_description -- converted to .rst using pypandoc README.md - - version -- parsed `__version__` within `name/version.py` - - Parameters - ---------- - name: str - Name of the Python package - **kwargs: - The rest of the keyword arguments passed to setuptools.setup as is - """ - # Simple defaults - for k, v in { - 'author': "The DataLad Team and Contributors", - 'author_email': "team@datalad.org" - }.items(): - if kwargs.get(k) is None: - kwargs[k] = v - - # More complex, requiring some function call - if kwargs.get('packages') is None: - kwargs['packages'] = find_packages('.') - if kwargs.get('long_description') is None: - kwargs.update(get_long_description_from_README()) - if kwargs.get('version') is None: - kwargs['version'] = get_version(name) - - cmdclass = kwargs.get('cmdclass', {}) - # Check if command needs some module specific handling - for v in cmdclass.values(): - if hasattr(v, 'handle_module'): - getattr(v, 'handle_module')(name, **kwargs) - return setup(name=name, **kwargs) From 023a1b77194508f7aa1d7ec5ec3bb222c1d840aa Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Mon, 15 Jun 2020 08:16:13 +0200 Subject: [PATCH 4/9] BF: Read package metadata directly from setup.cfg The distribution object does not seem to see it (at least not under all circumstances). --- setup.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/setup.py b/setup.py index c693d7fe..b83ccfa8 100644 --- a/setup.py +++ b/setup.py @@ -11,14 +11,11 @@ from distutils.core import Command from distutils.errors import DistutilsOptionError -from distutils.version import LooseVersion -from genericpath import exists -from os import linesep, makedirs -from os.path import dirname, join as opj, sep as pathsep, splitext, isabs -from setuptools import findall, find_packages, setup - -from . 
import formatters as fmt - +from os.path import ( + dirname, + join as opj, +) +from setuptools.config import read_configuration import versioneer @@ -129,9 +126,12 @@ def run(self): #appname = self._parser.prog appname = 'datalad' + cfg = read_configuration( + opj(dirname(dirname(__file__)), 'setup.cfg'))['metadata'] + sections = { 'Authors': """{0} is developed by {1} <{2}>.""".format( - appname, dist.get_author(), dist.get_author_email()), + appname, cfg['author'], cfg['author_email']), } for cls, opath, ext in ((fmt.ManPageFormatter, self.manpath, '1'), From 9975f82bd5b43cc6bd6e0d36fc18ae9382bb40ab Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Thu, 15 Apr 2021 08:15:27 +0200 Subject: [PATCH 5/9] BF: We only have lower-case command names In a command suite that only defines the class name of a command, and no explicit cmdline name, no manpage would be built, because the test against the parser content would look like `Ls != ls` This change enforces lower-case for autogenerated command names. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b83ccfa8..7f65a831 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,7 @@ def finalize_options(self): mod_name, suite_name = self.cmdsuite.split(':') mod = __import__(mod_name, fromlist=mod_name.split('.')) suite = getattr(mod, suite_name) - self.cmdlist = [c[2] if len(c) > 2 else c[1].replace('_', '-') + self.cmdlist = [c[2] if len(c) > 2 else c[1].replace('_', '-').lower() for c in suite[1]] self.announce('Writing man page(s) to %s' % self.manpath) From 56d1eebfb44ce0749ac70afd80eef9054c3a814a Mon Sep 17 00:00:00 2001 From: "John T. Wodder II" Date: Mon, 25 Oct 2021 08:51:50 -0400 Subject: [PATCH 6/9] Stop using distutils --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 7f65a831..a4b92edc 100644 --- a/setup.py +++ b/setup.py @@ -9,12 +9,11 @@ import datetime import os -from distutils.core import Command -from distutils.errors import DistutilsOptionError from os.path import ( dirname, join as opj, ) +from setuptools import Command, DistutilsOptionError from setuptools.config import read_configuration import versioneer From e9568f6fc314f86940290064c8c9d2f8bf20e8f8 Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Wed, 2 Feb 2022 14:02:54 +0100 Subject: [PATCH 7/9] Adopt standard extension setup --- .codeclimate.yml | 17 + .gitattributes | 1 + .noannex | 0 MANIFEST.in | 6 +- Makefile | 50 +- datalad_container/_version.py | 520 ++++++++++ pyproject.toml | 2 + requirements-devel.txt | 7 +- setup.cfg | 56 + setup.py | 87 +- tools/appveyor_env_setup.bat | 4 + versioneer.py | 1822 +++++++++++++++++++++++++++++++++ 12 files changed, 2456 insertions(+), 116 deletions(-) create mode 100644 .codeclimate.yml create mode 100644 .gitattributes create mode 100644 .noannex create mode 100644 datalad_container/_version.py create mode 100644 pyproject.toml create mode 100644 setup.cfg create mode 100644 tools/appveyor_env_setup.bat create mode 100644 versioneer.py diff --git a/.codeclimate.yml b/.codeclimate.yml new file mode 100644 index 00000000..9ac1b30d --- /dev/null +++ b/.codeclimate.yml @@ -0,0 +1,17 @@ +version: "2" +checks: + file-lines: + config: + threshold: 500 +plugins: + bandit: + enabled: true + checks: + assert_used: + enabled: false +exclude_patterns: +- "_datalad_buildsupport/" +- "versioneer.py" +- "*/_version.py" +- "tools/" +- "**/tests/" diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..0a7f8090 
--- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +datalad_container/_version.py export-subst diff --git a/.noannex b/.noannex new file mode 100644 index 00000000..e69de29b diff --git a/MANIFEST.in b/MANIFEST.in index 1034f72c..c974e64b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,5 @@ -include formatters.py setup_support.py +include CONTRIBUTORS LICENSE versioneer.py +graft _datalad_buildsupport +graft docs +prune docs/build +global-exclude *.py[cod] diff --git a/Makefile b/Makefile index aeff6e15..be524e99 100644 --- a/Makefile +++ b/Makefile @@ -1,47 +1,21 @@ -# simple makefile to simplify repetetive build env management tasks under posix -# Ideas borrowed from scikit-learn's and PyMVPA Makefiles -- thanks! - PYTHON ?= python -NOSETESTS ?= nosetests - -MODULE ?= datalad - -all: clean test clean: $(PYTHON) setup.py clean - rm -rf dist build bin docs/build docs/source/generated + rm -rf dist build bin docs/build docs/source/generated *.egg-info -find . -name '*.pyc' -delete -find . -name '__pycache__' -type d -delete -bin: - mkdir -p $@ - PYTHONPATH=bin:$(PYTHONPATH) python setup.py develop --install-dir $@ - -test-code: bin - PATH=bin:$(PATH) PYTHONPATH=bin:$(PYTHONPATH) $(NOSETESTS) -s -v $(MODULE) - -test-coverage: - rm -rf coverage .coverage - $(NOSETESTS) -s -v --with-coverage $(MODULE) - -test: test-code - - -trailing-spaces: - find $(MODULE) -name "*.py" -exec perl -pi -e 's/[ \t]*$$//' {} \; - -code-analysis: - flake8 $(MODULE) | grep -v __init__ | grep -v external - pylint -E -i y $(MODULE)/ # -d E1103,E0611,E1101 - -update-changelog: - @echo ".. This file is auto-converted from CHANGELOG.md (make update-changelog) -- do not edit\n\nChange log\n**********" > docs/source/changelog.rst - pandoc -t rst CHANGELOG.md >> docs/source/changelog.rst - -release-pypi: update-changelog - # better safe than sorry +release-pypi: + # avoid upload of stale builds test ! -e dist - python setup.py sdist - python setup.py bdist_wheel --universal + $(PYTHON) setup.py sdist bdist_wheel twine upload dist/* + +update-buildsupport: + git subtree pull \ + -m "Update DataLad build helper" \ + --squash \ + --prefix _datalad_buildsupport \ + https://github.com/datalad/datalad-buildsupport.git \ + master diff --git a/datalad_container/_version.py b/datalad_container/_version.py new file mode 100644 index 00000000..866f3e51 --- /dev/null +++ b/datalad_container/_version.py @@ -0,0 +1,520 @@ + +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "" + cfg.parentdir_prefix = "" + cfg.versionfile_source = "datalad_container/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["--git-dir=.git", "rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["--git-dir=.git", "describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["--git-dir=.git", "rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["--git-dir=.git", "rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["--git-dir=.git", "show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
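+        # Illustrative walk-through (based on the setup.cfg in this patch):
+        # with versionfile_source = "datalad_container/_version.py", the loop
+        # below strips one directory level per path component, e.g.
+        # ROOT/datalad_container/_version.py -> ROOT/datalad_container -> ROOT.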
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..f671f356 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[build-system] +requires = ["setuptools >= 43.0.0", "wheel"] diff --git a/requirements-devel.txt b/requirements-devel.txt index d61bfa12..7720a6c9 100644 --- a/requirements-devel.txt +++ b/requirements-devel.txt @@ -1 +1,6 @@ --e .[devel] +# requirements for a development environment +nose +coverage +sphinx +sphinx_rtd_theme +six diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..237f1ace --- /dev/null +++ b/setup.cfg @@ -0,0 +1,56 @@ +[metadata] +url = https://github.com/datalad/datalad-container +author = The DataLad Team and Contributors +author_email = team@datalad.org +description = DataLad extension package for working with containerized environments +long_description = file:README.md +long_description_content_type = text/markdown; charset=UTF-8 +license = MIT +classifiers = + Programming Language :: Python + License :: OSI Approved :: BSD License + Programming Language :: Python :: 3 + +[options] +python_requires = >= 3.7 +install_requires = + datalad >= 0.13 + requests>=1.2 # to talk to Singularity-hub +packages = find: +include_package_data = True + +[options.extras_require] +# this matches the name used by -core and what is expected by some CI setups +devel = + nose + coverage + +[options.packages.find] +# do not ship the build helpers +exclude= + _datalad_buildsupport + +[options.entry_points] +# 'datalad.extensions' is THE entrypoint inspected by the datalad API builders +datalad.extensions = + # the label in front of '=' is the command suite label + # the entrypoint can point to any symbol of any name, as long it is + # valid datalad interface specification (see demo in this extensions) + container = datalad_container:command_suite + +[versioneer] +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
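+# Note: an empty tag_prefix below means that plain version numbers are
+# expected as git tags (no "v" or project-name prefix in front of them).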
+VCS = git +style = pep440 +versionfile_source = datalad_container/_version.py +versionfile_build = datalad_container/_version.py +tag_prefix = +parentdir_prefix = + +[coverage:report] +show_missing = True +omit = + # versioneer code + datalad_container/_version.py diff --git a/setup.py b/setup.py index e8b30d2e..93a7e8f3 100755 --- a/setup.py +++ b/setup.py @@ -1,82 +1,17 @@ #!/usr/bin/env python from setuptools import setup -from setuptools import find_packages -from setuptools import findall +import versioneer -from os.path import join as opj -from os.path import sep as pathsep -from os.path import splitext -from os.path import dirname - -from setup_support import BuildManPage -from setup_support import BuildRSTExamplesFromScripts -from setup_support import get_version - - -def findsome(subdir, extensions): - """Find files under subdir having specified extensions - - Leading directory (datalad) gets stripped - """ - return [ - f.split(pathsep, 1)[1] for f in findall(opj('datalad_container', subdir)) - if splitext(f)[-1].lstrip('.') in extensions - ] - -# extension version -version = get_version() - -cmdclass = { - 'build_manpage': BuildManPage, - 'build_examples': BuildRSTExamplesFromScripts, -} - -with open(opj(dirname(__file__), 'README.md')) as fp: - long_description = fp.read() +from _datalad_buildsupport.setup import ( + BuildManPage, +) -requires = { - 'core': [ - 'datalad>=0.13', - 'requests>=1.2', # to talk to Singularity-hub - ], - 'devel-docs': [ - # Documentation - 'sphinx>=1.6.2', - 'sphinx-rtd-theme', - ], - 'tests': [ - 'nose>=1.3.4', - 'coverage', - 'six', - ], -} -requires['devel'] = sum(list(requires.values()), []) +cmdclass = versioneer.get_cmdclass() +cmdclass.update(build_manpage=BuildManPage) -setup( - # basic project properties can be set arbitrarily - name="datalad_container", - author="The DataLad Team and Contributors", - author_email="team@datalad.org", - version=version, - description="DataLad extension package for working with containerized environments", - long_description=long_description, - long_description_content_type="text/markdown", - packages=[pkg for pkg in find_packages('.') if pkg.startswith('datalad')], - # datalad command suite specs from here - install_requires=requires['core'], - extras_require=requires, - cmdclass=cmdclass, - entry_points = { - # 'datalad.extensions' is THE entrypoint inspected by the datalad API builders - 'datalad.extensions': [ - # the label in front of '=' is the command suite label - # the entrypoint can point to any symbol of any name, as long it is - # valid datalad interface specification (see demo in this extension) - 'container=datalad_container:command_suite', - ], - 'datalad.tests': [ - 'container=datalad_container', - ], - }, -) +if __name__ == '__main__': + setup(name='datalad_container', + version=versioneer.get_version(), + cmdclass=cmdclass, + ) diff --git a/tools/appveyor_env_setup.bat b/tools/appveyor_env_setup.bat new file mode 100644 index 00000000..d4649012 --- /dev/null +++ b/tools/appveyor_env_setup.bat @@ -0,0 +1,4 @@ +set PY=%1-x64 +set TMP=C:\DLTMP +set TEMP=C:\DLTMP +set PATH=C:\Python%PY%;C:\Python%PY%\Scripts;%PATH% diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 00000000..51ca8182 --- /dev/null +++ b/versioneer.py @@ -0,0 +1,1822 @@ + +# Version: 0.18 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! 
+* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere to your $PATH +* add a `[versioneer]` section to your setup.cfg (see below) +* run `versioneer install` in your source tree, commit the results + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes. + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. 
This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". 
To investigate the problem, run `setup.py
+version`, which will run the version-lookup code in a verbose mode, and will
+display the full contents of `get_versions()` (including the `error` string,
+which may help identify what went wrong).
+
+## Known Limitations
+
+Some situations are known to cause problems for Versioneer. This details the
+most significant ones. More can be found on Github
+[issues page](https://github.com/warner/python-versioneer/issues).
+
+### Subprojects
+
+Versioneer has limited support for source trees in which `setup.py` is not in
+the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
+two common reasons why `setup.py` might not be in the root:
+
+* Source trees which contain multiple subprojects, such as
+  [Buildbot](https://github.com/buildbot/buildbot), which contains both
+  "master" and "slave" subprojects, each with their own `setup.py`,
+  `setup.cfg`, and `tox.ini`.  Projects like these produce multiple PyPI
+  distributions (and upload multiple independently-installable tarballs).
+* Source trees whose main purpose is to contain a C library, but which also
+  provide bindings to Python (and perhaps other languages) in subdirectories.
+
+Versioneer will look for `.git` in parent directories, and most operations
+should get the right version string. However `pip` and `setuptools` have bugs
+and implementation details which frequently cause `pip install .` from a
+subproject directory to fail to find a correct version string (so it usually
+defaults to `0+unknown`).
+
+`pip install --editable .` should work correctly. `setup.py install` might
+work too.
+
+Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
+some later version.
+
+[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+this issue. The discussion in
+[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+issue from the Versioneer side in more detail.
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
+[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
+pip to let Versioneer work correctly.
+
+Versioneer-0.16 and earlier only looked for a `.git` directory next to the
+`setup.cfg`, so subprojects were completely unsupported with those releases.
+
+### Editable installs with setuptools <= 18.5
+
+`setup.py develop` and `pip install --editable .` allow you to install a
+project into a virtualenv once, then continue editing the source code (and
+test) without re-installing after every change.
+
+"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
+convenient way to specify executable scripts that should be installed along
+with the python package.
+
+These both work as expected when using modern setuptools. When using
+setuptools-18.5 or earlier, however, certain operations will cause
+`pkg_resources.DistributionNotFound` errors when running the entrypoint
+script, which must be resolved by re-installing the package. This happens
+when the install happens with one version, then the egg_info data is
+regenerated while a different version is checked out. Many setup.py commands
+cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
+a different virtualenv), so this can be surprising.
+
+[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+this one, but upgrading to a newer version of setuptools should probably
+resolve it.
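+
+As a minimal usage sketch of the "Version-String Flavors" described above
+(assuming a checkout that is two commits past a "0.11" tag and carries
+uncommitted changes, queried from the project root as `setup.py` would):
+
+    import versioneer
+
+    info = versioneer.get_versions()
+    # e.g. {'version': '0.11+2.g1076c97.dirty',
+    #       'full-revisionid': '1076c978a8d3cfc70f408fe5974aa6c092c949ac',
+    #       'dirty': True, 'error': None, 'date': '...'}
+    print(versioneer.get_version())  # only the condensed version string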
+ +### Unicode version strings + +While Versioneer works (and is continually tested) with both Python 2 and +Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. +Newer releases probably generate unicode version strings on py2. It's not +clear that this is wrong, but it may be surprising for applications when then +write these strings to a network connection or include them in bytes-oriented +APIs like cryptographic checksums. + +[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates +this question. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +""" + +from __future__ import print_function +try: + import configparser +except ImportError: + import ConfigParser as configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ("Versioneer was unable to run the project root directory. 
" + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . + setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.SafeConfigParser() + with open(setup_cfg, "r") as f: + parser.readfp(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % 
stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY['git'] = ''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
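+        # (Each path component of versionfile_source corresponds to one
+        # os.path.dirname() step below, e.g. a value like "pkg/_version.py"
+        # walks up two levels to reach the project root.)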
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+ +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. + """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. 
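+    # Fallback order tried below: expanded git keywords, then a previously
+    # written static _version.py, then 'git describe' on a checkout, and
+    # finally the parent directory name.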
+ + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + print("unable to compute version") + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(): + """Get the custom setuptools/distutils subclasses used by Versioneer.""" + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. + # Also see https://github.com/warner/python-versioneer/issues/52 + + cmds = {} + + # we add "version" to both distutils and setuptools + from setuptools import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? 
+ # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? 
+ try: + from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + except ImportError: + from py2exe.build_exe import py2exe as _py2exe # py2 + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1) From e3dbd4ed5b325a279584709d51db5a1ebffc9fed Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Wed, 2 Feb 2022 14:11:09 +0100 Subject: [PATCH 8/9] Remove old build helpers --- formatters.py | 310 ----------------------------------------------- setup_support.py | 147 ---------------------- 2 files changed, 457 deletions(-) delete mode 100644 formatters.py delete mode 100644 setup_support.py diff --git a/formatters.py b/formatters.py deleted file mode 100644 index 4a42313a..00000000 --- a/formatters.py +++ /dev/null @@ -1,310 +0,0 @@ -# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# -# See COPYING file distributed along with the DataLad package for the -# copyright and license terms. 
-# -# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## - -import argparse -import datetime -import re - - -class ManPageFormatter(argparse.HelpFormatter): - # This code was originally distributed - # under the same License of Python - # Copyright (c) 2014 Oz Nahum Tiram - def __init__(self, - prog, - indent_increment=2, - max_help_position=4, - width=1000000, - section=1, - ext_sections=None, - authors=None, - version=None - ): - - super(ManPageFormatter, self).__init__( - prog, - indent_increment=indent_increment, - max_help_position=max_help_position, - width=width) - - self._prog = prog - self._section = 1 - self._today = datetime.date.today().strftime('%Y\\-%m\\-%d') - self._ext_sections = ext_sections - self._version = version - - def _get_formatter(self, **kwargs): - return self.formatter_class(prog=self.prog, **kwargs) - - def _markup(self, txt): - return txt.replace('-', '\\-') - - def _underline(self, string): - return "\\fI\\s-1" + string + "\\s0\\fR" - - def _bold(self, string): - if not string.strip().startswith('\\fB'): - string = '\\fB' + string - if not string.strip().endswith('\\fR'): - string = string + '\\fR' - return string - - def _mk_synopsis(self, parser): - self.add_usage(parser.usage, parser._actions, - parser._mutually_exclusive_groups, prefix='') - usage = self._format_usage(None, parser._actions, - parser._mutually_exclusive_groups, '') - # replace too long list of commands with a single placeholder - usage = re.sub(r'{[^]]*?create,.*?}', ' COMMAND ', usage, flags=re.MULTILINE) - # take care of proper wrapping - usage = re.sub(r'\[([-a-zA-Z0-9]*)\s([a-zA-Z0-9{}|_]*)\]', r'[\1\~\2]', usage) - - usage = usage.replace('%s ' % self._prog, '') - usage = '.SH SYNOPSIS\n.nh\n.HP\n\\fB%s\\fR %s\n.hy\n' % (self._markup(self._prog), - usage) - return usage - - def _mk_title(self, prog): - name_version = "\"{0} {1}\"".format(prog, self._version) - return '.TH {0} {1} {2} {3}\n'.format(prog, self._section, - self._today, name_version) - - def _make_name(self, parser): - """ - this method is in consitent with others ... 
it relies on - distribution - """ - return '.SH NAME\n%s \\- %s\n' % (parser.prog, - parser.description) - - def _mk_description(self, parser): - desc = parser.description - if not desc: - return '' - desc = desc.replace('\n\n', '\n.PP\n') - # sub-section headings - desc = re.sub(r'^\*(.*)\*$', r'.SS \1', desc, flags=re.MULTILINE) - # italic commands - desc = re.sub(r'^ ([-a-z]*)$', r'.TP\n\\fI\1\\fR', desc, flags=re.MULTILINE) - # deindent body text, leave to troff viewer - desc = re.sub(r'^ (\S.*)\n', '\\1\n', desc, flags=re.MULTILINE) - # format NOTEs as indented paragraphs - desc = re.sub(r'^NOTE\n', '.TP\nNOTE\n', desc, flags=re.MULTILINE) - # deindent indented paragraphs after heading setup - desc = re.sub(r'^ (.*)$', '\\1', desc, flags=re.MULTILINE) - - return '.SH DESCRIPTION\n%s\n' % self._markup(desc) - - def _mk_footer(self, sections): - if not hasattr(sections, '__iter__'): - return '' - - footer = [] - for section, value in sections.items(): - part = ".SH {}\n {}".format(section.upper(), value) - footer.append(part) - - return '\n'.join(footer) - - def format_man_page(self, parser): - page = [] - page.append(self._mk_title(self._prog)) - page.append(self._mk_synopsis(parser)) - page.append(self._mk_description(parser)) - page.append(self._mk_options(parser)) - page.append(self._mk_footer(self._ext_sections)) - - return ''.join(page) - - def _mk_options(self, parser): - - formatter = parser._get_formatter() - - # positionals, optionals and user-defined groups - for action_group in parser._action_groups: - formatter.start_section(None) - formatter.add_text(None) - formatter.add_arguments(action_group._group_actions) - formatter.end_section() - - # epilog - formatter.add_text(parser.epilog) - - # determine help from format above - help = formatter.format_help() - # add spaces after comma delimiters for easier reformatting - help = re.sub(r'([a-z]),([a-z])', '\\1, \\2', help) - # get proper indentation for argument items - help = re.sub(r'^ (\S.*)\n', '.TP\n\\1\n', help, flags=re.MULTILINE) - # deindent body text, leave to troff viewer - help = re.sub(r'^ (\S.*)\n', '\\1\n', help, flags=re.MULTILINE) - return '.SH OPTIONS\n' + help - - def _format_action_invocation(self, action): - if not action.option_strings: - metavar, = self._metavar_formatter(action, action.dest)(1) - return metavar - - else: - parts = [] - - # if the Optional doesn't take a value, format is: - # -s, --long - if action.nargs == 0: - parts.extend([self._bold(action_str) for action_str in - action.option_strings]) - - # if the Optional takes a value, format is: - # -s ARGS, --long ARGS - else: - default = self._underline(action.dest.upper()) - args_string = self._format_args(action, default) - for option_string in action.option_strings: - parts.append('%s %s' % (self._bold(option_string), - args_string)) - - return ', '.join(parts) - - -class RSTManPageFormatter(ManPageFormatter): - def _get_formatter(self, **kwargs): - return self.formatter_class(prog=self.prog, **kwargs) - - def _markup(self, txt): - # put general tune-ups here - return txt - - def _underline(self, string): - return "*{0}*".format(string) - - def _bold(self, string): - return "**{0}**".format(string) - - def _mk_synopsis(self, parser): - self.add_usage(parser.usage, parser._actions, - parser._mutually_exclusive_groups, prefix='') - usage = self._format_usage(None, parser._actions, - parser._mutually_exclusive_groups, '') - - usage = usage.replace('%s ' % self._prog, '') - usage = 'Synopsis\n--------\n::\n\n %s %s\n' \ - % 
(self._markup(self._prog), usage) - return usage - - def _mk_title(self, prog): - # and an easy to use reference point - title = ".. _man_%s:\n\n" % prog.replace(' ', '-') - title += "{0}".format(prog) - title += '\n{0}\n\n'.format('=' * len(prog)) - return title - - def _make_name(self, parser): - return '' - - def _mk_description(self, parser): - desc = parser.description - if not desc: - return '' - return 'Description\n-----------\n%s\n' % self._markup(desc) - - def _mk_footer(self, sections): - if not hasattr(sections, '__iter__'): - return '' - - footer = [] - for section, value in sections.items(): - part = "\n{0}\n{1}\n{2}\n".format( - section, - '-' * len(section), - value) - footer.append(part) - - return '\n'.join(footer) - - def _mk_options(self, parser): - - # this non-obvious maneuver is really necessary! - formatter = self.__class__(self._prog) - - # positionals, optionals and user-defined groups - for action_group in parser._action_groups: - formatter.start_section(None) - formatter.add_text(None) - formatter.add_arguments(action_group._group_actions) - formatter.end_section() - - # epilog - formatter.add_text(parser.epilog) - - # determine help from format above - option_sec = formatter.format_help() - - return '\n\nOptions\n-------\n{0}'.format(option_sec) - - def _format_action(self, action): - # determine the required width and the entry label - action_header = self._format_action_invocation(action) - - if action.help: - help_text = self._expand_help(action) - help_lines = self._split_lines(help_text, 80) - help = ' '.join(help_lines) - else: - help = '' - - # return a single string - return '{0}\n{1}\n{2}\n\n'.format( - action_header, - - '~' * len(action_header), - help) - - -def cmdline_example_to_rst(src, out=None, ref=None): - if out is None: - from six.moves import StringIO - out = StringIO() - - # place header - out.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') - if ref: - # place cross-ref target - out.write('.. {0}:\n\n'.format(ref)) - - # parser status vars - inexample = False - incodeblock = False - - for line in src: - if line.startswith('#% EXAMPLE START'): - inexample = True - incodeblock = False - continue - if not inexample: - continue - if line.startswith('#% EXAMPLE END'): - break - if not inexample: - continue - if line.startswith('#%'): - incodeblock = not incodeblock - if incodeblock: - out.write('\n.. code-block:: sh\n\n') - continue - if not incodeblock and line.startswith('#'): - out.write(line[(min(2, len(line) - 1)):]) - continue - if incodeblock: - if not line.rstrip().endswith('#% SKIP'): - out.write(' %s' % line) - continue - if not len(line.strip()): - continue - else: - raise RuntimeError("this should not happen") - - return out diff --git a/setup_support.py b/setup_support.py deleted file mode 100644 index 72ec099c..00000000 --- a/setup_support.py +++ /dev/null @@ -1,147 +0,0 @@ -# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## -# -# See COPYING file distributed along with the DataLad package for the -# copyright and license terms. 
-# -# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## - - -import os -from os.path import dirname, join as opj - -from distutils.core import Command -from distutils.errors import DistutilsOptionError -import datetime -import formatters as fmt - -cmdline_command_names = ( - 'containers-list', - 'containers-add', - 'containers-remove', - 'containers-run', -) - -def _path_rel2file(p): - return opj(dirname(__file__), p) - - -def get_version(): - """Load version of datalad from version.py without entailing any imports - """ - # This might entail lots of imports which might not yet be available - # so let's do ad-hoc parsing of the version.py - with open(opj(dirname(__file__), 'datalad_container', 'version.py')) as f: - version_lines = list(filter(lambda x: x.startswith('__version__'), f)) - assert (len(version_lines) == 1) - return version_lines[0].split('=')[1].strip(" '\"\t\n") - - -class BuildManPage(Command): - # The BuildManPage code was originally distributed - # under the same License of Python - # Copyright (c) 2014 Oz Nahum Tiram - - description = 'Generate man page from an ArgumentParser instance.' - - user_options = [ - ('manpath=', None, 'output path for manpages'), - ('rstpath=', None, 'output path for RST files'), - ('parser=', None, 'module path to an ArgumentParser instance' - '(e.g. mymod:func, where func is a method or function which return' - 'a dict with one or more arparse.ArgumentParser instances.'), - ] - - def initialize_options(self): - self.manpath = opj('build', 'man') - self.rstpath = opj('docs', 'source', 'generated', 'man') - self.parser = 'datalad.cmdline.main:setup_parser' - - def finalize_options(self): - if self.manpath is None: - raise DistutilsOptionError('\'manpath\' option is required') - if self.rstpath is None: - raise DistutilsOptionError('\'rstpath\' option is required') - if self.parser is None: - raise DistutilsOptionError('\'parser\' option is required') - self.manpath = _path_rel2file(self.manpath) - self.rstpath = _path_rel2file(self.rstpath) - mod_name, func_name = self.parser.split(':') - fromlist = mod_name.split('.') - try: - mod = __import__(mod_name, fromlist=fromlist) - self._parser = getattr(mod, func_name)( - ['datalad'], - formatter_class=fmt.ManPageFormatter, - return_subparsers=True) - - except ImportError as err: - raise err - - self.announce('Writing man page(s) to %s' % self.manpath) - self._today = datetime.date.today() - - def run(self): - - dist = self.distribution - #homepage = dist.get_url() - #appname = self._parser.prog - appname = 'datalad' - - sections = { - 'Authors': """{0} is developed by {1} <{2}>.""".format( - appname, dist.get_author(), dist.get_author_email()), - } - - dist = self.distribution - for cls, opath, ext in ((fmt.ManPageFormatter, self.manpath, '1'), - (fmt.RSTManPageFormatter, self.rstpath, 'rst')): - if not os.path.exists(opath): - os.makedirs(opath) - for cmdname in cmdline_command_names: - p = self._parser[cmdname] - cmdname = "{0}{1}".format( - 'datalad ' if cmdname != 'datalad' else '', - cmdname) - format = cls(cmdname, ext_sections=sections, version=get_version()) - formatted = format.format_man_page(p) - with open(opj(opath, '{0}.{1}'.format( - cmdname.replace(' ', '-'), - ext)), - 'w') as f: - f.write(formatted) - - -class BuildRSTExamplesFromScripts(Command): - description = 'Generate RST variants of example shell scripts.' 
- - user_options = [ - ('expath=', None, 'path to look for example scripts'), - ('rstpath=', None, 'output path for RST files'), - ] - - def initialize_options(self): - self.expath = opj('docs', 'examples') - self.rstpath = opj('docs', 'source', 'generated', 'examples') - - def finalize_options(self): - if self.expath is None: - raise DistutilsOptionError('\'expath\' option is required') - if self.rstpath is None: - raise DistutilsOptionError('\'rstpath\' option is required') - self.expath = _path_rel2file(self.expath) - self.rstpath = _path_rel2file(self.rstpath) - self.announce('Converting example scripts') - - def run(self): - opath = self.rstpath - if not os.path.exists(opath): - os.makedirs(opath) - - from glob import glob - for example in glob(opj(self.expath, '*.sh')): - exname = os.path.basename(example)[:-3] - with open(opj(opath, '{0}.rst'.format(exname)), 'w') as out: - fmt.cmdline_example_to_rst( - open(example), - out=out, - ref='_example_{0}'.format(exname)) From 522633a1398542380439ac0facd8dc26bed4ad47 Mon Sep 17 00:00:00 2001 From: Michael Hanke Date: Wed, 2 Feb 2022 14:30:53 +0100 Subject: [PATCH 9/9] Adjust sphinx setup for new build support --- docs/source/conf.py | 235 +++++------------------------------------- docs/source/index.rst | 13 --- 2 files changed, 26 insertions(+), 222 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 06bc03d6..c2e88d5b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -14,17 +14,17 @@ import sys import os -from os.path import join as opj, exists -from os.path import dirname -from os import pardir - -def setup(sphinx): - sys.path.insert(0, os.path.abspath('utils')) # travis - sys.path.insert(0, os.path.abspath(opj(pardir, 'utils'))) # RTD - from pygments_ansi_color import AnsiColorLexer - sphinx.add_lexer("ansi-color", AnsiColorLexer()) +import datetime +from os.path import ( + abspath, + dirname, + exists, + join as opj, +) +from os import pardir +import datalad_container # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -37,8 +37,14 @@ def setup(sphinx): if exists(setup_py_path): sys.path.insert(0, os.path.abspath(dirname(setup_py_path))) try: - for cmd in 'manpage', 'examples': - os.system('{} build_{}'.format(setup_py_path, cmd)) + for cmd in 'manpage',: #'examples': + os.system( + '{} build_{} --cmdsuite {} --manpath {} --rstpath {}'.format( + setup_py_path, + cmd, + 'datalad_container:command_suite', + abspath(opj(dirname(setup_py_path), 'build', 'man')), + opj(dirname(__file__), 'generated', 'man'))) except: # shut up and do your best pass @@ -76,25 +82,19 @@ def setup(sphinx): # source_suffix = ['.rst', '.md'] source_suffix = '.rst' -# The encoding of source files. -#source_encoding = 'utf-8-sig' - # The master toctree document. master_doc = 'index' # General information about the project. project = u'Datalad for containerized environments' -copyright = u'2018, DataLad team' +copyright = u'2018-{}, DataLad team'.format(datetime.datetime.now().year) author = u'DataLad team' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -# -# The short X.Y version. -version = '1.0' -# The full version, including alpha/beta/rc tags. -release = '1.1.0' +version = datalad_container.__version__ +release = version # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. @@ -103,224 +103,41 @@ def setup(sphinx): # Usually you set "language" from the command line for these cases. language = None -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -#html_theme = 'alabaster' html_theme = 'sphinx_rtd_theme' -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = '_static/datalad_logo.png' -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = ['_extras'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - # If true, the index is split into individual pages for each letter. html_split_index = True # If true, links to the reST sources are added to the pages. html_show_sourcelink = False -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'datalad_containerdoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'datalad_container.tex', u'datalad_container Documentation', - u'DataLad team', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'datalad', u'datalad Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'datalad', u'datalad Documentation', - author, 'datalad', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/': None} +# smart quotes are incompatible with the RST flavor of the generated manpages +# but see `smartquotes_action` for more fine-grained control, in case +# some of this functionality is needed +smartquotes = False diff --git a/docs/source/index.rst b/docs/source/index.rst index 1420ff84..e5671421 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -11,7 +11,6 @@ Documentation ============= * :ref:`Documentation index ` -* `Getting started`_ * `API reference`_ .. toctree:: @@ -21,18 +20,6 @@ Documentation acknowledgements -Getting started ---------------- - -.. toctree:: - :hidden: - - generated/examples/basic_demo - -.. include:: generated/examples/basic_demo.rst - :start-after: *************** - - API Reference =============
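The conf.py hunk in the final patch replaces the old unparameterized
'build_manpage'/'build_examples' calls with a single invocation of the new
build_manpage command. As a rough standalone sketch of what that call amounts
to -- the '--cmdsuite', '--manpath', and '--rstpath' values are taken from the
hunk above, while the definition of setup_py_path is outside the visible
context, so the two-directories-up location used here is an assumption:

    import os
    from os.path import abspath, dirname, join as opj

    # Assumed layout: this stands in for code in docs/source/conf.py,
    # with setup.py at the repository root two levels up.
    setup_py_path = abspath(
        opj(dirname(__file__), os.pardir, os.pardir, 'setup.py'))

    # Regenerate the man pages and their RST counterparts before Sphinx runs,
    # mirroring the os.system() call added to conf.py.
    os.system(
        '{} build_manpage --cmdsuite {} --manpath {} --rstpath {}'.format(
            setup_py_path,
            'datalad_container:command_suite',
            abspath(opj(dirname(setup_py_path), 'build', 'man')),
            opj(dirname(__file__), 'generated', 'man'),
        )
    )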