From b5e745ef46b5e19046c93036bd8461eb3b09e307 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Tue, 5 Oct 2021 13:35:49 -0400 Subject: [PATCH 01/36] Start reorganizing. --- tools/schemacode/MANIFEST.in | 1 + tools/schemacode/README.md | 3 + tools/schemacode/pyproject.toml | 29 + tools/schemacode/{ => schemacode}/__init__.py | 0 tools/schemacode/schemacode/_version.py | 554 +++++ tools/schemacode/schemacode/info.py | 84 + tools/schemacode/{ => schemacode}/schema.py | 10 +- tools/schemacode/schemacode/tests/__init__.py | 0 .../schemacode/tests/test_schema.py | 1 + tools/schemacode/{ => schemacode}/utils.py | 3 +- tools/schemacode/setup.cfg | 22 + tools/schemacode/setup.py | 57 + tools/schemacode/versioneer.py | 1885 +++++++++++++++++ 13 files changed, 2643 insertions(+), 6 deletions(-) create mode 100644 tools/schemacode/MANIFEST.in create mode 100644 tools/schemacode/README.md create mode 100644 tools/schemacode/pyproject.toml rename tools/schemacode/{ => schemacode}/__init__.py (100%) create mode 100644 tools/schemacode/schemacode/_version.py create mode 100644 tools/schemacode/schemacode/info.py rename tools/schemacode/{ => schemacode}/schema.py (99%) create mode 100644 tools/schemacode/schemacode/tests/__init__.py create mode 100644 tools/schemacode/schemacode/tests/test_schema.py rename tools/schemacode/{ => schemacode}/utils.py (98%) create mode 100644 tools/schemacode/setup.cfg create mode 100644 tools/schemacode/setup.py create mode 100644 tools/schemacode/versioneer.py diff --git a/tools/schemacode/MANIFEST.in b/tools/schemacode/MANIFEST.in new file mode 100644 index 0000000000..a24694ff41 --- /dev/null +++ b/tools/schemacode/MANIFEST.in @@ -0,0 +1 @@ +include versioneer.py diff --git a/tools/schemacode/README.md b/tools/schemacode/README.md new file mode 100644 index 0000000000..e2b682a800 --- /dev/null +++ b/tools/schemacode/README.md @@ -0,0 +1,3 @@ +# schemacode + +A Python library for working with the BIDS schema. diff --git a/tools/schemacode/pyproject.toml b/tools/schemacode/pyproject.toml new file mode 100644 index 0000000000..d362784a8c --- /dev/null +++ b/tools/schemacode/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = ["setuptools", "wheel"] + +[tool.black] +line-length = 99 +target-version = ['py37'] +include = '\.pyi?$' +exclude = ''' +( + /( + \.eggs # exclude a few common directories in the + | \.git # root of the project + | \.github + | \.hg + | \.pytest_cache + | _build + | build + | dist + )/ + | get_version.py + | versioneer.py + | schemacode/info.py + | schemacode/_version.py +) +''' + +[tool.isort] +profile = "black" +multi_line_output = 3 diff --git a/tools/schemacode/__init__.py b/tools/schemacode/schemacode/__init__.py similarity index 100% rename from tools/schemacode/__init__.py rename to tools/schemacode/schemacode/__init__.py diff --git a/tools/schemacode/schemacode/_version.py b/tools/schemacode/schemacode/_version.py new file mode 100644 index 0000000000..bac97d022e --- /dev/null +++ b/tools/schemacode/schemacode/_version.py @@ -0,0 +1,554 @@ +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. 
Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "" + cfg.parentdir_prefix = "" + cfg.versionfile_source = "schemacode/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen( + [c] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r"\d", r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s*" % tag_prefix, + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
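+        # (editorial note, illustrative): with versionfile_source set to
+        # "schemacode/_version.py" in get_config() above, the split yields two
+        # components, so os.path.dirname() is applied twice and root ends up
+        # two levels above this file, i.e. the tools/schemacode directory that
+        # holds setup.py, setup.cfg, and versioneer.py.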
+ for i in cfg.versionfile_source.split("/"): + root = os.path.dirname(root) + except NameError: + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/tools/schemacode/schemacode/info.py b/tools/schemacode/schemacode/info.py new file mode 100644 index 0000000000..d696c427b7 --- /dev/null +++ b/tools/schemacode/schemacode/info.py @@ -0,0 +1,84 @@ +"""Base module variables.""" +import importlib.util +import os.path as op +from pathlib import Path + +# Get version +spec = importlib.util.spec_from_file_location( + "_version", op.join(op.dirname(__file__), "schemacode/_version.py") +) +_version = importlib.util.module_from_spec(spec) +spec.loader.exec_module(_version) + +VERSION = _version.get_versions()["version"] +del _version + +# Get package description from README +# Since this file is executed from ../setup.py, the path to the README is determined by the +# location of setup.py. +readme_path = Path(__file__).parent.joinpath("README.md") +longdesc = readme_path.open().read() + +# Fields +AUTHOR = "bids-standard developers" +COPYRIGHT = "Copyright 2021, bids-standard developers" +CREDITS = "bids-standard developers" +LICENSE = "LGPL 2.1" +MAINTAINER = "" +EMAIL = "" +STATUS = "Prototype" +URL = "https://github.com/bids-standard/schemacode" +PACKAGENAME = "schemacode" +DESCRIPTION = "" +LONGDESC = longdesc + +DOWNLOAD_URL = "https://github.com/bids-standard/{name}/archive/{ver}.tar.gz".format( + name=PACKAGENAME, ver=VERSION +) + +REQUIRES = [ + "numpy", + "pandas", + "tabulate", + "pyyaml", +] + +TESTS_REQUIRES = [ + "codecov", + "coverage<5.0", + "flake8>=3.7", + "flake8-black", + "flake8-isort", + "pytest", + "pytest-cov", +] + +EXTRA_REQUIRES = { + "dev": ["versioneer"], + "doc": [ + "sphinx>=1.5.3", + "sphinx_rtd_theme", + ], + "tests": TESTS_REQUIRES, +} + +ENTRY_POINTS = {} + +# Enable a handle to install all extra dependencies at once +EXTRA_REQUIRES["all"] = list(set([v for deps in EXTRA_REQUIRES.values() for v in deps])) + +# Supported Python versions using PEP 440 version specifiers +# Should match the same set of Python versions as classifiers +PYTHON_REQUIRES = ">=3.6" + +# Package classifiers +CLASSIFIERS = [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering :: Information Analysis", + "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", +] diff --git a/tools/schemacode/schema.py b/tools/schemacode/schemacode/schema.py similarity index 99% rename from tools/schemacode/schema.py rename to tools/schemacode/schemacode/schema.py index 472efc0df3..d813d3535c 100644 --- a/tools/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -1,11 +1,10 @@ -"""Schema loading- and processing-related functions. 
-""" +"""Schema loading- and processing-related functions.""" import logging import os from copy import deepcopy from pathlib import Path -from warnings import warn from pprint import pprint +from warnings import warn import pandas as pd import yaml @@ -20,7 +19,10 @@ ) logging.basicConfig(format="%(asctime)-15s [%(levelname)8s] %(message)s") -BIDS_SCHEMA = Path(__file__).parent.parent / "src" / "schema" + +def get_schema_dir(): + schema_dir = Path(__file__).parent.parent.parent / "src" / "schema" + return schema_dir def _get_entry_name(path): diff --git a/tools/schemacode/schemacode/tests/__init__.py b/tools/schemacode/schemacode/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tools/schemacode/schemacode/tests/test_schema.py b/tools/schemacode/schemacode/tests/test_schema.py new file mode 100644 index 0000000000..91d7de897b --- /dev/null +++ b/tools/schemacode/schemacode/tests/test_schema.py @@ -0,0 +1 @@ +"""Tests for the schemacode package.""" \ No newline at end of file diff --git a/tools/schemacode/utils.py b/tools/schemacode/schemacode/utils.py similarity index 98% rename from tools/schemacode/utils.py rename to tools/schemacode/schemacode/utils.py index 36f449ee90..1a0051a609 100644 --- a/tools/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -1,5 +1,4 @@ -"""Utility functions for the bids-specification schema. -""" +"""Utility functions for the bids-specification schema.""" import logging import os.path as op diff --git a/tools/schemacode/setup.cfg b/tools/schemacode/setup.cfg new file mode 100644 index 0000000000..ae6de3aed3 --- /dev/null +++ b/tools/schemacode/setup.cfg @@ -0,0 +1,22 @@ +[versioneer] +VCS = git +style = pep440 +versionfile_source = schemacode/_version.py +versionfile_build = schemacode/_version.py +tag_prefix = +parentdir_prefix = + +[flake8] +max-line-length = 99 +exclude=*build/ +ignore = E203,E402,W503 +per-file-ignores = + */__init__.py:F401 + +[tool:pytest] +log_cli = true + +[options.package_data] +* = + resources/* + tests/data/* diff --git a/tools/schemacode/setup.py b/tools/schemacode/setup.py new file mode 100644 index 0000000000..413a51cf28 --- /dev/null +++ b/tools/schemacode/setup.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" schemacode setup script """ + + +def main(): + """ Install entry-point """ + import os.path as op + from inspect import currentframe, getfile + from io import open + + from setuptools import find_packages, setup + + import versioneer + + ver_file = op.join("schemacode", "info.py") + with open(ver_file) as f: + exec(f.read()) + vars = locals() + + root_dir = op.dirname(op.abspath(getfile(currentframe()))) + cmdclass = versioneer.get_cmdclass() + + pkg_data = { + "schemacode": [ + "tests/data/*", + ] + } + + setup( + name=vars["PACKAGENAME"], + version=vars["VERSION"], + description=vars["DESCRIPTION"], + long_description=vars["LONGDESC"], + long_description_content_type="text/markdown", + author=vars["AUTHOR"], + author_email=vars["EMAIL"], + maintainer=vars["MAINTAINER"], + maintainer_email=vars["EMAIL"], + url=vars["URL"], + license=vars["LICENSE"], + classifiers=vars["CLASSIFIERS"], + download_url=vars["DOWNLOAD_URL"], + # Dependencies handling + python_requires=vars["PYTHON_REQUIRES"], + install_requires=vars["REQUIRES"], + tests_require=vars["TESTS_REQUIRES"], + extras_require=vars["EXTRA_REQUIRES"], + entry_points=vars["ENTRY_POINTS"], + packages=find_packages(exclude=("tests",)), + zip_safe=False, + cmdclass=cmdclass, + ) + + +if __name__ == 
"__main__": + main() diff --git a/tools/schemacode/versioneer.py b/tools/schemacode/versioneer.py new file mode 100644 index 0000000000..2b54540510 --- /dev/null +++ b/tools/schemacode/versioneer.py @@ -0,0 +1,1885 @@ +# Version: 0.18 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere to your $PATH +* add a `[versioneer]` section to your setup.cfg (see below) +* run `versioneer install` in your source tree, commit the results + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes. + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. 
+ +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/warner/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are +two common reasons why `setup.py` might not be in the root: + +* Source trees which contain multiple subprojects, such as + [Buildbot](https://github.com/buildbot/buildbot), which contains both + "master" and "slave" subprojects, each with their own `setup.py`, + `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI + distributions (and upload multiple independently-installable tarballs). +* Source trees whose main purpose is to contain a C library, but which also + provide bindings to Python (and perhaps other langauges) in subdirectories. + +Versioneer will look for `.git` in parent directories, and most operations +should get the right version string. However `pip` and `setuptools` have bugs +and implementation details which frequently cause `pip install .` from a +subproject directory to fail to find a correct version string (so it usually +defaults to `0+unknown`). + +`pip install --editable .` should work correctly. `setup.py install` might +work too. + +Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in +some later version. + +[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +this issue. The discussion in +[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +issue from the Versioneer side in more detail. +[pip PR#3176](https://github.com/pypa/pip/pull/3176) and +[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve +pip to let Versioneer work correctly. + +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. + +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. 
When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + +### Unicode version strings + +While Versioneer works (and is continually tested) with both Python 2 and +Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. +Newer releases probably generate unicode version strings on py2. It's not +clear that this is wrong, but it may be surprising for applications when then +write these strings to a network connection or include them in bytes-oriented +APIs like cryptographic checksums. + +[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates +this question. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +""" + +from __future__ import print_function + +try: + import configparser +except ImportError: + import ConfigParser as configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . 
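+
+    (Editorial note for this patch, not upstream Versioneer text: in this
+    repository those files live under tools/schemacode/, so setup.py commands
+    are expected to be run from that directory.)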
+ """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ( + "Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND')." + ) + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print( + "Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py) + ) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . 
+ setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.SafeConfigParser() + with open(setup_cfg, "r") as f: + parser.readfp(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen( + [c] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY[ + "git" +] = ''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
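+    # (editorial illustration): in a working checkout the file read here still
+    # contains the unexpanded markers, e.g.
+    #     git_refnames = "$Format:%d$"
+    # while in a "git archive" tarball export-subst has expanded them to
+    # something like
+    #     git_refnames = " (HEAD -> master, tag: 1.6.0)"
+    #     git_full = "1076c978a8d3cfc70f408fe5974aa6c092c949ac"
+    # git_versions_from_keywords() below then turns the expanded form into a
+    # version string.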
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r"\d", r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s*" % tag_prefix, + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + 0 + ].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search( + r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) + if not mo: + mo = re.search( + r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. + """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert ( + cfg.versionfile_source is not None + ), "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. 
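    # Whichever source succeeds, the result has the same shape; for a clean
    # checkout sitting exactly on a tag it would look roughly like this
    # (hypothetical values):
    #     {"version": "1.0.0",
    #      "full-revisionid": "b5e745ef46b5e19046c93036bd8461eb3b09e307",
    #      "dirty": False, "error": None, "date": "2021-10-05T13:35:49-0400"}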
+ + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + print("unable to compute version") + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(): + """Get the custom setuptools/distutils subclasses used by Versioneer.""" + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. + # Also see https://github.com/warner/python-versioneer/issues/52 + + cmds = {} + + # we add "version" to both distutils and setuptools + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? 
+ # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + cmds["build_py"] = cmd_build_py + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if "py2exe" in sys.modules: # py2exe enabled? 
+ try: + from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + except ImportError: + from py2exe.build_exe import py2exe as _py2exe # py2 + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file( + target_versionfile, self._versioneer_generated_versions + ) + + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except ( + EnvironmentError, + configparser.NoSectionError, + configparser.NoOptionError, + ) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print( + " appending versionfile_source ('%s') to MANIFEST.in" + % cfg.versionfile_source + ) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
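    # For this package the relevant setup.cfg value appears to be
    # versionfile_source = schemacode/_version.py, so the steps above plus
    # do_vcs_install() roughly amount to (illustrative summary only):
    #     MANIFEST.in    gains "include versioneer.py"
    #                    and "include schemacode/_version.py"
    #     .gitattributes gains "schemacode/_version.py export-subst"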
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1) From 2cbcffc2ce85b63f003191e9ef634af6d26c2397 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Tue, 5 Oct 2021 15:46:26 -0400 Subject: [PATCH 02/36] Clean up docstrings. --- tools/schemacode/schemacode/schema.py | 11 ++--------- tools/schemacode/schemacode/utils.py | 1 + 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index 0036acb526..e3352dc555 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -20,11 +20,6 @@ logging.basicConfig(format="%(asctime)-15s [%(levelname)8s] %(message)s") -def get_schema_dir(): - schema_dir = Path(__file__).parent.parent.parent / "src" / "schema" - return schema_dir - - def _get_entry_name(path): if path.suffix == ".yaml": return path.name[:-5] # no .yaml @@ -168,8 +163,7 @@ def filter_schema(schema, **kwargs): def make_entity_definitions(schema): - """Generate definitions and other relevant information for entities in the - specification. + """Generate definitions and other relevant information for entities in the specification. Each entity gets its own heading. @@ -211,8 +205,7 @@ def make_entity_definitions(schema): def make_filename_template(schema, **kwargs): - """Create codeblocks containing example filename patterns for a given - datatype. + """Create codeblocks containing example filename patterns for a given datatype. Parameters ---------- diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index 1532437aa5..7a7e204d90 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -89,6 +89,7 @@ def set_logger_level(lgr, level): def drop_unused_entities(df): """Remove columns from a dataframe where all values in the column are NaNs. + For entity tables, this limits each table to only entities that are used within the modality. From 13044ba07c26ad9554ee727c8f7add51490dd555 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:10:39 -0400 Subject: [PATCH 03/36] Change license to MIT. 
--- tools/schemacode/schemacode/info.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/schemacode/schemacode/info.py b/tools/schemacode/schemacode/info.py index d696c427b7..6ed687c144 100644 --- a/tools/schemacode/schemacode/info.py +++ b/tools/schemacode/schemacode/info.py @@ -23,13 +23,13 @@ AUTHOR = "bids-standard developers" COPYRIGHT = "Copyright 2021, bids-standard developers" CREDITS = "bids-standard developers" -LICENSE = "LGPL 2.1" +LICENSE = "MIT" MAINTAINER = "" EMAIL = "" STATUS = "Prototype" URL = "https://github.com/bids-standard/schemacode" PACKAGENAME = "schemacode" -DESCRIPTION = "" +DESCRIPTION = "Python tools for working with the BIDS schema." LONGDESC = longdesc DOWNLOAD_URL = "https://github.com/bids-standard/{name}/archive/{ver}.tar.gz".format( @@ -76,7 +76,7 @@ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering :: Information Analysis", - "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", + "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", From 73e89c54d7b44f66604f8086e1521424e3389ae4 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:14:08 -0400 Subject: [PATCH 04/36] Fix spelling in versioneer files. --- tools/schemacode/schemacode/_version.py | 2 +- tools/schemacode/versioneer.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/tools/schemacode/schemacode/_version.py b/tools/schemacode/schemacode/_version.py index bac97d022e..8c0102e67f 100644 --- a/tools/schemacode/schemacode/_version.py +++ b/tools/schemacode/schemacode/_version.py @@ -292,7 +292,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces diff --git a/tools/schemacode/versioneer.py b/tools/schemacode/versioneer.py index 2b54540510..323046a244 100644 --- a/tools/schemacode/versioneer.py +++ b/tools/schemacode/versioneer.py @@ -179,7 +179,7 @@ `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. + provide bindings to Python (and perhaps other languages) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs @@ -281,6 +281,7 @@ import configparser except ImportError: import ConfigParser as configparser + import errno import json import os @@ -699,7 +700,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces @@ -1106,7 +1107,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces From 4b59f93269954fb08d61dd3dfa38e954c4051313 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:16:55 -0400 Subject: [PATCH 05/36] Install schemacode in config file. --- .circleci/config.yml | 1 + tools/mkdocs_macros_bids/macros.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 70f6abe904..c8b40b9eaf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,6 +11,7 @@ jobs: command: | python -m pip install --upgrade pip pip install -r requirements.txt + pip install -e ~/project/tools/schemacode/ - run: name: generate docs command: mkdocs build --clean --strict --verbose diff --git a/tools/mkdocs_macros_bids/macros.py b/tools/mkdocs_macros_bids/macros.py index 9a3cf15cc2..55d69c8fea 100644 --- a/tools/mkdocs_macros_bids/macros.py +++ b/tools/mkdocs_macros_bids/macros.py @@ -2,10 +2,11 @@ import os import sys +from schemacode import schema, utils + code_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) sys.path.append(code_path) -from schemacode import schema, utils from examplecode import example From 5df0354e8ba75dcc57ecb7d30313e761b027e7d1 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:20:25 -0400 Subject: [PATCH 06/36] Update config.yml --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index c8b40b9eaf..bd7b0fde07 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,7 +11,7 @@ jobs: command: | python -m pip install --upgrade pip pip install -r requirements.txt - pip install -e ~/project/tools/schemacode/ + pip install ~/project/tools/schemacode/ - run: name: generate docs command: mkdocs build --clean --strict --verbose From 53839481d55d65ae4d02dbb8a61440a39cc4955a Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:40:50 -0400 Subject: [PATCH 07/36] Fix path to schema. I don't like this. We need something less dependent on folder locations. --- tools/schemacode/schemacode/utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index 7a7e204d90..a2a033bf3b 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -14,9 +14,10 @@ def get_schema_path(): str Absolute path to the folder containing schema-related files. """ - return op.abspath( - op.join(op.dirname(op.dirname(op.dirname(__file__))), "src", "schema") + op.sep + schema_dir = op.abspath( + op.join(op.dirname(op.dirname(op.dirname(op.dirname(__file__)))), "src", "schema") + op.sep ) + return schema_dir def combine_extensions(lst): From f2492f8b45ea157a746383480f5a4f2d79f8c435 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:47:43 -0400 Subject: [PATCH 08/36] Log. 
--- tools/schemacode/schemacode/schema.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index e3352dc555..72dfb4388a 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -80,6 +80,7 @@ def load_schema(schema_path): object_group_files = sorted(glob(str(objects_dir / "*.yaml"))) for object_group_file in object_group_files: group_name = op.splitext(op.basename(object_group_file))[0] + lgr.info(f"Loading {group_name} objects.") with open(object_group_file, "r") as fo: dict_ = yaml.load(fo, Loader=yaml.SafeLoader) dict_ = dereference_yaml(dict_, dict_) @@ -91,6 +92,7 @@ def load_schema(schema_path): rule_group_folders = [f for f in rule_group_folders if op.isdir(f)] for rule_group_file in rule_group_files: group_name = op.splitext(op.basename(rule_group_file))[0] + lgr.info(f"Loading {group_name} rules.") with open(rule_group_file, "r") as fo: dict_ = yaml.load(fo, Loader=yaml.SafeLoader) dict_ = dereference_yaml(dict_, dict_) @@ -103,6 +105,7 @@ def load_schema(schema_path): schema["rules"][group_name] = {} for rule_subgroup_file in rule_subgroup_files: subgroup_name = op.splitext(op.basename(rule_subgroup_file))[0] + lgr.info(f"Loading {subgroup_name} rules.") with open(rule_subgroup_file, "r") as fo: dict_ = yaml.load(fo, Loader=yaml.SafeLoader) dict_ = dereference_yaml(dict_, dict_) From 4d594f4c1676d6d7ada6736277f9e5a5c965e239 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 13:51:23 -0400 Subject: [PATCH 09/36] Why isn't CircleCI working? It works locally... --- tools/schemacode/schemacode/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index a2a033bf3b..328e8dd316 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -17,6 +17,7 @@ def get_schema_path(): schema_dir = op.abspath( op.join(op.dirname(op.dirname(op.dirname(op.dirname(__file__)))), "src", "schema") + op.sep ) + raise Exception(schema_dir) return schema_dir From 2c7db1a22dfe5e206975db6241126d01948ba0bd Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 14:01:44 -0400 Subject: [PATCH 10/36] Make loading function more restrictive. 
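With this check, pointing load_schema() at a folder that does not contain
objects/ and rules/ subdirectories fails immediately with a clear error,
instead of quietly producing a schema with empty sections. A minimal sketch
of the kind of test this enables (hypothetical test, not part of this commit):

    import pytest

    from schemacode import schema

    def test_load_schema_rejects_bad_path(tmp_path):
        # tmp_path is pytest's built-in temporary-directory fixture
        with pytest.raises(ValueError):
            schema.load_schema(tmp_path)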
--- tools/schemacode/schemacode/schema.py | 5 +++++ tools/schemacode/schemacode/utils.py | 1 - 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index 72dfb4388a..8b38781fc1 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -72,6 +72,11 @@ def load_schema(schema_path): objects_dir = schema_path / "objects/" rules_dir = schema_path / "rules/" + if not objects_dir.is_dir() or not rules_dir.is_dir(): + raise ValueError( + f"Schema path or paths do not exist:\n\t{str(objects_dir)}\n\t{str(rules_dir)}" + ) + schema = {} schema["objects"] = {} schema["rules"] = {} diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index 328e8dd316..a2a033bf3b 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -17,7 +17,6 @@ def get_schema_path(): schema_dir = op.abspath( op.join(op.dirname(op.dirname(op.dirname(op.dirname(__file__)))), "src", "schema") + op.sep ) - raise Exception(schema_dir) return schema_dir From a3e7dfec3480f82488d986b9849e587d68a829b0 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 11 Oct 2021 15:01:43 -0400 Subject: [PATCH 11/36] Move rendering functions into a new module. --- tools/mkdocs_macros_bids/macros.py | 12 +- tools/schemacode/schemacode/__init__.py | 3 +- tools/schemacode/schemacode/render.py | 371 ++++++++++++++++++ tools/schemacode/schemacode/schema.py | 363 +---------------- tools/schemacode/schemacode/tests/conftest.py | 10 + .../schemacode/tests/test_schema.py | 14 +- 6 files changed, 405 insertions(+), 368 deletions(-) create mode 100644 tools/schemacode/schemacode/render.py create mode 100644 tools/schemacode/schemacode/tests/conftest.py diff --git a/tools/mkdocs_macros_bids/macros.py b/tools/mkdocs_macros_bids/macros.py index 55d69c8fea..a6ea5c2a73 100644 --- a/tools/mkdocs_macros_bids/macros.py +++ b/tools/mkdocs_macros_bids/macros.py @@ -2,7 +2,7 @@ import os import sys -from schemacode import schema, utils +from schemacode import render, schema, utils code_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) sys.path.append(code_path) @@ -29,7 +29,7 @@ def make_filename_template(**kwargs): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - codeblock = schema.make_filename_template(schema_obj, **kwargs) + codeblock = render.make_filename_template(schema_obj, **kwargs) return codeblock @@ -51,7 +51,7 @@ def make_entity_table(**kwargs): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - table = schema.make_entity_table(schema_obj, **kwargs) + table = render.make_entity_table(schema_obj, **kwargs) return table @@ -67,7 +67,7 @@ def make_entity_definitions(): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - text = schema.make_entity_definitions(schema_obj) + text = render.make_entity_definitions(schema_obj) return text @@ -87,7 +87,7 @@ def make_suffix_table(suffixes): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - table = schema.make_suffix_table(schema_obj, suffixes) + table = render.make_suffix_table(schema_obj, suffixes) return table @@ -112,7 +112,7 @@ def make_metadata_table(field_info): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - table = schema.make_metadata_table(schema_obj, field_info) + table = render.make_metadata_table(schema_obj, 
field_info) return table diff --git a/tools/schemacode/schemacode/__init__.py b/tools/schemacode/schemacode/__init__.py index 3f492a4087..d01d8a33d3 100644 --- a/tools/schemacode/schemacode/__init__.py +++ b/tools/schemacode/schemacode/__init__.py @@ -1,7 +1,8 @@ """A Python package for working with the BIDS schema.""" -from . import schema, utils +from . import render, schema, utils __all__ = [ + "render", "schema", "utils", ] diff --git a/tools/schemacode/schemacode/render.py b/tools/schemacode/schemacode/render.py new file mode 100644 index 0000000000..f3dfaaa7b9 --- /dev/null +++ b/tools/schemacode/schemacode/render.py @@ -0,0 +1,371 @@ +"""Functions for rendering portions of the schema as text.""" +import logging +import os + +import pandas as pd +from tabulate import tabulate + +from . import utils +from .schema import filter_schema + +lgr = utils.get_logger() +# Basic settings for output, for now just basic +utils.set_logger_level( + lgr, os.environ.get("BIDS_SCHEMA_LOG_LEVEL", logging.INFO) +) +logging.basicConfig(format="%(asctime)-15s [%(levelname)8s] %(message)s") + + +def make_entity_definitions(schema): + """Generate definitions and other relevant information for entities in the specification. + + Each entity gets its own heading. + + Parameters + ---------- + schema : dict + The schema object, which is a dictionary with nested dictionaries and + lists stored within it. + + Returns + ------- + text : str + A string containing descriptions and some formatting + information about the entities in the schema. + """ + entity_order = schema["rules"]["entities"] + entity_definitions = schema["objects"]["entities"] + + text = "" + for entity in entity_order: + entity_info = entity_definitions[entity] + entity_shorthand = entity_info["entity"] + text += "\n" + text += "## {}".format(entity_shorthand) + text += "\n\n" + text += "Full name: {}".format(entity_info["name"]) + text += "\n\n" + text += "Format: `{}-<{}>`".format( + entity_info["entity"], + entity_info.get("format", "label"), + ) + text += "\n\n" + if "enum" in entity_info.keys(): + text += "Allowed values: `{}`".format("`, `".join(entity_info["enum"])) + text += "\n\n" + + text += "Definition: {}".format(entity_info["description"]) + return text + + +def make_filename_template(schema, **kwargs): + """Create codeblocks containing example filename patterns for a given datatype. + + Parameters + ---------- + schema : dict + The schema object, which is a dictionary with nested dictionaries and + lists stored within it. + kwargs : dict + Keyword arguments used to filter the schema. + Example kwargs that may be used include: "suffixes", "datatypes", + "extensions". + + Returns + ------- + codeblock : str + A multiline string containing the filename templates for file types + in the schema, after filtering. 
+ """ + schema = filter_schema(schema, **kwargs) + + entity_order = schema["rules"]["entities"] + + paragraph = "" + # Parent folders + paragraph += "{}-<{}>/\n\t[{}-<{}>/]\n".format( + schema["objects"]["entities"]["subject"]["entity"], + schema["objects"]["entities"]["subject"]["format"], + schema["objects"]["entities"]["session"]["entity"], + schema["objects"]["entities"]["session"]["format"], + ) + + for datatype in schema["rules"]["datatypes"].keys(): + paragraph += "\t\t{}/\n".format(datatype) + + # Unique filename patterns + for group in schema["rules"]["datatypes"][datatype]: + string = "\t\t\t" + for ent in entity_order: + ent_format = "{}-<{}>".format( + schema["objects"]["entities"][ent]["entity"], + schema["objects"]["entities"][ent].get("format", "label") + ) + if ent in group["entities"]: + if group["entities"][ent] == "required": + if len(string.strip()): + string += "_" + ent_format + else: + # Only the first entity doesn't need an underscore + string += ent_format + else: + if len(string.strip()): + string += "[_" + ent_format + "]" + else: + # Only the first entity doesn't need an underscore + string += "[" + ent_format + "]" + + # In cases of large numbers of suffixes, + # we use the "suffix" variable and expect a table later in the spec + if len(group["suffixes"]) > 5: + suffix = "_" + string += suffix + strings = [string] + else: + strings = [ + string + "_" + suffix for suffix in group["suffixes"] + ] + + # Add extensions + full_strings = [] + extensions = group["extensions"] + extensions = [ + ext if ext != "*" else "." for ext in extensions + ] + extensions = utils.combine_extensions(extensions) + if len(extensions) > 5: + # Combine exts when there are many, but keep JSON separate + if ".json" in extensions: + extensions = [".", ".json"] + else: + extensions = ["."] + + for extension in extensions: + for string in strings: + new_string = string + extension + full_strings.append(new_string) + + full_strings = sorted(full_strings) + if full_strings: + paragraph += "\n".join(full_strings) + "\n" + + paragraph = paragraph.rstrip() + codeblock = "Template:\n```Text\n" + paragraph + "\n```" + codeblock = codeblock.expandtabs(4) + return codeblock + + +def make_entity_table(schema, tablefmt="github", **kwargs): + """Produce entity table (markdown) based on schema. + + Parameters + ---------- + schema_path : str + Folder containing schema, which is stored in yaml files. + entities_file : str, optional + File in which entities are described. + This is used for hyperlinks in the table, so the path to the file + should be considered from the location of out_file. + Default is '09-entities.md'. + + Returns + ------- + table_str : str + Markdown string containing the table. 
+ """ + schema = filter_schema(schema, **kwargs) + + ENTITIES_FILE = "09-entities.md" + + # prepare the table based on the schema + # import pdb; pdb.set_trace() + header = ["Entity", "DataType"] + formats = ["Format", "DataType"] + entity_to_col = {} + table = [formats] + + # Compose header and formats first + for i, (entity, spec) in enumerate(schema["objects"]["entities"].items()): + entity_shorthand = schema["objects"]["entities"][entity]["entity"] + header.append(spec["name"]) + formats.append( + f'[`{entity_shorthand}-<{spec.get("format", "label")}>`]' + f"({ENTITIES_FILE}#{entity_shorthand})" + ) + entity_to_col[entity] = i + 1 + + # Go through data types + for dtype, dtype_specs in schema["rules"]["datatypes"].items(): + dtype_rows = {} + + # each dtype could have multiple specs + for spec in dtype_specs: + suffixes = spec.get("suffixes") + + # Skip this part of the schema if no suffixes are found. + # This is a hack to work around filter_schema's limitations. + if not len(suffixes): + continue + + # TODO:
is specific for html form + suffixes_str = " ".join(suffixes) if suffixes else "" + dtype_row = [dtype] + ([""] * len(entity_to_col)) + for ent, req in spec.get("entities", []).items(): + dtype_row[entity_to_col[ent]] = req.upper() + + # Merge specs within dtypes if they share all of the same entities + if dtype_row in dtype_rows.values(): + for k, v in dtype_rows.items(): + if dtype_row == v: + dtype_rows.pop(k) + new_k = k + " " + suffixes_str + new_k = new_k.strip() + dtype_rows[new_k] = v + break + else: + dtype_rows[suffixes_str] = dtype_row + + # Reformat first column + dtype_rows = { + dtype + "
({})".format(k): v for k, v in dtype_rows.items() + } + dtype_rows = [[k] + v for k, v in dtype_rows.items()] + table += dtype_rows + + # Create multi-level index because first two rows are headers + cols = list(zip(header, table[0])) + cols = pd.MultiIndex.from_tuples(cols) + table = pd.DataFrame(data=table[1:], columns=cols) + table = table.set_index(("Entity", "Format")) + + # Remove unnecessary columns + table = utils.drop_unused_entities(table) + table = utils.flatten_multiindexed_columns(table) + + # Print it as markdown + table_str = tabulate(table, headers="keys", tablefmt=tablefmt) + return table_str + + +def make_suffix_table(schema, suffixes, tablefmt="github"): + """Produce suffix table (markdown) based on requested suffixes. + + Parameters + ---------- + schema : dict + suffixes : list of str + tablefmt : str + + Returns + ------- + table_str : str + Tabulated table as a string. + """ + # The filter function doesn't work here. + suffix_schema = schema["objects"]["suffixes"] + + suffixes_found = [f for f in suffixes if f in suffix_schema.keys()] + suffixes_not_found = [f for f in suffixes if f not in suffix_schema.keys()] + if suffixes_not_found: + raise Exception( + "Warning: Missing suffixes: {}".format( + ", ".join(suffixes_not_found) + ) + ) + + df = pd.DataFrame( + index=suffixes_found, + columns=["**Name**", "**Description**"], + ) + # Index by suffix because name cannot be assumed to be unique + df.index.name = "`suffix`" + for suffix in suffixes_found: + suffix_info = suffix_schema[suffix] + description = suffix_info["description"] + # A backslash before a newline means continue a string + description = description.replace("\\\n", "") + # Two newlines should be respected + description = description.replace("\n\n", "
") + # Otherwise a newline corresponds to a space + description = description.replace("\n", " ") + + df.loc[suffix] = [suffix_info["name"], description] + + df = df.reset_index(drop=False) + df = df.set_index("**Name**") + df = df[["`suffix`", "**Description**"]] + + # Print it as markdown + table_str = tabulate(df, headers="keys", tablefmt=tablefmt) + return table_str + + +def make_metadata_table(schema, field_info, tablefmt="github"): + """Produce metadata table (markdown) based on requested fields. + + Parameters + ---------- + schema : dict + The BIDS schema. + field_info : dict of strings or tuples + A dictionary mapping metadata keys to requirement levels in the + rendered metadata table. + The dictionary values may be strings, in which case the string + is the requirement level information, or two-item tuples of strings, + in which case the first string is the requirement level information + and the second string is additional table-specific information + about the metadata field that will be appended to the field's base + definition from the schema. + tablefmt : string, optional + The target table format. The default is "github" (GitHub format). + + Returns + ------- + table_str : str + The tabulated table as a Markdown string. + """ + fields = list(field_info.keys()) + # The filter function doesn't work here. + metadata_schema = schema["objects"]["metadata"] + + retained_fields = [f for f in fields if f in metadata_schema.keys()] + dropped_fields = [f for f in fields if f not in metadata_schema.keys()] + if dropped_fields: + print("Warning: Missing fields: {}".format(", ".join(dropped_fields))) + + # Use the "name" field in the table, to allow for filenames to not match + # "names". + df = pd.DataFrame( + index=[metadata_schema[f]["name"] for f in retained_fields], + columns=["**Requirement Level**", "**Data type**", "**Description**"], + ) + df.index.name = "**Key name**" + for field in retained_fields: + field_name = metadata_schema[field]["name"] + requirement_info = field_info[field] + description_addendum = "" + if isinstance(requirement_info, tuple): + requirement_info, description_addendum = requirement_info + + requirement_info = requirement_info.replace( + "DEPRECATED", + "[DEPRECATED](/02-common-principles.html#definitions)", + ) + + type_string = utils.resolve_metadata_type(metadata_schema[field]) + + description = ( + metadata_schema[field]["description"] + " " + description_addendum + ) + # A backslash before a newline means continue a string + description = description.replace("\\\n", "") + # Two newlines should be respected + description = description.replace("\n\n", "
") + # Otherwise a newline corresponds to a space + description = description.replace("\n", " ") + + df.loc[field_name] = [requirement_info, type_string, description] + + # Print it as markdown + table_str = tabulate(df, headers="keys", tablefmt=tablefmt) + return table_str diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index 8b38781fc1..efb976e491 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -6,9 +6,7 @@ from glob import glob from pathlib import Path -import pandas as pd import yaml -from tabulate import tabulate from . import utils @@ -85,7 +83,7 @@ def load_schema(schema_path): object_group_files = sorted(glob(str(objects_dir / "*.yaml"))) for object_group_file in object_group_files: group_name = op.splitext(op.basename(object_group_file))[0] - lgr.info(f"Loading {group_name} objects.") + lgr.debug(f"Loading {group_name} objects.") with open(object_group_file, "r") as fo: dict_ = yaml.load(fo, Loader=yaml.SafeLoader) dict_ = dereference_yaml(dict_, dict_) @@ -97,7 +95,7 @@ def load_schema(schema_path): rule_group_folders = [f for f in rule_group_folders if op.isdir(f)] for rule_group_file in rule_group_files: group_name = op.splitext(op.basename(rule_group_file))[0] - lgr.info(f"Loading {group_name} rules.") + lgr.debug(f"Loading {group_name} rules.") with open(rule_group_file, "r") as fo: dict_ = yaml.load(fo, Loader=yaml.SafeLoader) dict_ = dereference_yaml(dict_, dict_) @@ -110,7 +108,7 @@ def load_schema(schema_path): schema["rules"][group_name] = {} for rule_subgroup_file in rule_subgroup_files: subgroup_name = op.splitext(op.basename(rule_subgroup_file))[0] - lgr.info(f"Loading {subgroup_name} rules.") + lgr.debug(f"Loading {subgroup_name} rules.") with open(rule_subgroup_file, "r") as fo: dict_ = yaml.load(fo, Loader=yaml.SafeLoader) dict_ = dereference_yaml(dict_, dict_) @@ -168,358 +166,3 @@ def filter_schema(schema, **kwargs): if isinstance(item, dict): new_schema[i] = filter_schema(item, **kwargs) return new_schema - - -def make_entity_definitions(schema): - """Generate definitions and other relevant information for entities in the specification. - - Each entity gets its own heading. - - Parameters - ---------- - schema : dict - The schema object, which is a dictionary with nested dictionaries and - lists stored within it. - - Returns - ------- - text : str - A string containing descriptions and some formatting - information about the entities in the schema. - """ - entity_order = schema["rules"]["entities"] - entity_definitions = schema["objects"]["entities"] - - text = "" - for entity in entity_order: - entity_info = entity_definitions[entity] - entity_shorthand = entity_info["entity"] - text += "\n" - text += "## {}".format(entity_shorthand) - text += "\n\n" - text += "Full name: {}".format(entity_info["name"]) - text += "\n\n" - text += "Format: `{}-<{}>`".format( - entity_info["entity"], - entity_info.get("format", "label"), - ) - text += "\n\n" - if "enum" in entity_info.keys(): - text += "Allowed values: `{}`".format("`, `".join(entity_info["enum"])) - text += "\n\n" - - text += "Definition: {}".format(entity_info["description"]) - return text - - -def make_filename_template(schema, **kwargs): - """Create codeblocks containing example filename patterns for a given datatype. - - Parameters - ---------- - schema : dict - The schema object, which is a dictionary with nested dictionaries and - lists stored within it. 
- kwargs : dict - Keyword arguments used to filter the schema. - Example kwargs that may be used include: "suffixes", "datatypes", - "extensions". - - Returns - ------- - codeblock : str - A multiline string containing the filename templates for file types - in the schema, after filtering. - """ - schema = filter_schema(schema, **kwargs) - - entity_order = schema["rules"]["entities"] - - paragraph = "" - # Parent folders - paragraph += "{}-<{}>/\n\t[{}-<{}>/]\n".format( - schema["objects"]["entities"]["subject"]["entity"], - schema["objects"]["entities"]["subject"]["format"], - schema["objects"]["entities"]["session"]["entity"], - schema["objects"]["entities"]["session"]["format"], - ) - - for datatype in schema["rules"]["datatypes"].keys(): - paragraph += "\t\t{}/\n".format(datatype) - - # Unique filename patterns - for group in schema["rules"]["datatypes"][datatype]: - string = "\t\t\t" - for ent in entity_order: - ent_format = "{}-<{}>".format( - schema["objects"]["entities"][ent]["entity"], - schema["objects"]["entities"][ent].get("format", "label") - ) - if ent in group["entities"]: - if group["entities"][ent] == "required": - if len(string.strip()): - string += "_" + ent_format - else: - # Only the first entity doesn't need an underscore - string += ent_format - else: - if len(string.strip()): - string += "[_" + ent_format + "]" - else: - # Only the first entity doesn't need an underscore - string += "[" + ent_format + "]" - - # In cases of large numbers of suffixes, - # we use the "suffix" variable and expect a table later in the spec - if len(group["suffixes"]) > 5: - suffix = "_" - string += suffix - strings = [string] - else: - strings = [ - string + "_" + suffix for suffix in group["suffixes"] - ] - - # Add extensions - full_strings = [] - extensions = group["extensions"] - extensions = [ - ext if ext != "*" else "." for ext in extensions - ] - extensions = utils.combine_extensions(extensions) - if len(extensions) > 5: - # Combine exts when there are many, but keep JSON separate - if ".json" in extensions: - extensions = [".", ".json"] - else: - extensions = ["."] - - for extension in extensions: - for string in strings: - new_string = string + extension - full_strings.append(new_string) - - full_strings = sorted(full_strings) - if full_strings: - paragraph += "\n".join(full_strings) + "\n" - - paragraph = paragraph.rstrip() - codeblock = "Template:\n```Text\n" + paragraph + "\n```" - codeblock = codeblock.expandtabs(4) - return codeblock - - -def make_entity_table(schema, tablefmt="github", **kwargs): - """Produce entity table (markdown) based on schema. - - Parameters - ---------- - schema_path : str - Folder containing schema, which is stored in yaml files. - entities_file : str, optional - File in which entities are described. - This is used for hyperlinks in the table, so the path to the file - should be considered from the location of out_file. - Default is '09-entities.md'. - - Returns - ------- - table_str : str - Markdown string containing the table. 
- """ - schema = filter_schema(schema, **kwargs) - - ENTITIES_FILE = "09-entities.md" - - # prepare the table based on the schema - # import pdb; pdb.set_trace() - header = ["Entity", "DataType"] - formats = ["Format", "DataType"] - entity_to_col = {} - table = [formats] - - # Compose header and formats first - for i, (entity, spec) in enumerate(schema["objects"]["entities"].items()): - entity_shorthand = schema["objects"]["entities"][entity]["entity"] - header.append(spec["name"]) - formats.append( - f'[`{entity_shorthand}-<{spec.get("format", "label")}>`]' - f"({ENTITIES_FILE}#{entity_shorthand})" - ) - entity_to_col[entity] = i + 1 - - # Go through data types - for dtype, dtype_specs in schema["rules"]["datatypes"].items(): - dtype_rows = {} - - # each dtype could have multiple specs - for spec in dtype_specs: - suffixes = spec.get("suffixes") - - # Skip this part of the schema if no suffixes are found. - # This is a hack to work around filter_schema's limitations. - if not len(suffixes): - continue - - # TODO:
is specific for html form - suffixes_str = " ".join(suffixes) if suffixes else "" - dtype_row = [dtype] + ([""] * len(entity_to_col)) - for ent, req in spec.get("entities", []).items(): - dtype_row[entity_to_col[ent]] = req.upper() - - # Merge specs within dtypes if they share all of the same entities - if dtype_row in dtype_rows.values(): - for k, v in dtype_rows.items(): - if dtype_row == v: - dtype_rows.pop(k) - new_k = k + " " + suffixes_str - new_k = new_k.strip() - dtype_rows[new_k] = v - break - else: - dtype_rows[suffixes_str] = dtype_row - - # Reformat first column - dtype_rows = { - dtype + "
({})".format(k): v for k, v in dtype_rows.items() - } - dtype_rows = [[k] + v for k, v in dtype_rows.items()] - table += dtype_rows - - # Create multi-level index because first two rows are headers - cols = list(zip(header, table[0])) - cols = pd.MultiIndex.from_tuples(cols) - table = pd.DataFrame(data=table[1:], columns=cols) - table = table.set_index(("Entity", "Format")) - - # Remove unnecessary columns - table = utils.drop_unused_entities(table) - table = utils.flatten_multiindexed_columns(table) - - # Print it as markdown - table_str = tabulate(table, headers="keys", tablefmt=tablefmt) - return table_str - - -def make_suffix_table(schema, suffixes, tablefmt="github"): - """Produce suffix table (markdown) based on requested suffixes. - - Parameters - ---------- - schema : dict - suffixes : list of str - tablefmt : str - - Returns - ------- - table_str : str - Tabulated table as a string. - """ - # The filter function doesn't work here. - suffix_schema = schema["objects"]["suffixes"] - - suffixes_found = [f for f in suffixes if f in suffix_schema.keys()] - suffixes_not_found = [f for f in suffixes if f not in suffix_schema.keys()] - if suffixes_not_found: - raise Exception( - "Warning: Missing suffixes: {}".format( - ", ".join(suffixes_not_found) - ) - ) - - df = pd.DataFrame( - index=suffixes_found, - columns=["**Name**", "**Description**"], - ) - # Index by suffix because name cannot be assumed to be unique - df.index.name = "`suffix`" - for suffix in suffixes_found: - suffix_info = suffix_schema[suffix] - description = suffix_info["description"] - # A backslash before a newline means continue a string - description = description.replace("\\\n", "") - # Two newlines should be respected - description = description.replace("\n\n", "
") - # Otherwise a newline corresponds to a space - description = description.replace("\n", " ") - - df.loc[suffix] = [suffix_info["name"], description] - - df = df.reset_index(drop=False) - df = df.set_index("**Name**") - df = df[["`suffix`", "**Description**"]] - - # Print it as markdown - table_str = tabulate(df, headers="keys", tablefmt=tablefmt) - return table_str - - -def make_metadata_table(schema, field_info, tablefmt="github"): - """Produce metadata table (markdown) based on requested fields. - - Parameters - ---------- - schema : dict - The BIDS schema. - field_info : dict of strings or tuples - A dictionary mapping metadata keys to requirement levels in the - rendered metadata table. - The dictionary values may be strings, in which case the string - is the requirement level information, or two-item tuples of strings, - in which case the first string is the requirement level information - and the second string is additional table-specific information - about the metadata field that will be appended to the field's base - definition from the schema. - tablefmt : string, optional - The target table format. The default is "github" (GitHub format). - - Returns - ------- - table_str : str - The tabulated table as a Markdown string. - """ - fields = list(field_info.keys()) - # The filter function doesn't work here. - metadata_schema = schema["objects"]["metadata"] - - retained_fields = [f for f in fields if f in metadata_schema.keys()] - dropped_fields = [f for f in fields if f not in metadata_schema.keys()] - if dropped_fields: - print("Warning: Missing fields: {}".format(", ".join(dropped_fields))) - - # Use the "name" field in the table, to allow for filenames to not match - # "names". - df = pd.DataFrame( - index=[metadata_schema[f]["name"] for f in retained_fields], - columns=["**Requirement Level**", "**Data type**", "**Description**"], - ) - df.index.name = "**Key name**" - for field in retained_fields: - field_name = metadata_schema[field]["name"] - requirement_info = field_info[field] - description_addendum = "" - if isinstance(requirement_info, tuple): - requirement_info, description_addendum = requirement_info - - requirement_info = requirement_info.replace( - "DEPRECATED", - "[DEPRECATED](/02-common-principles.html#definitions)", - ) - - type_string = utils.resolve_metadata_type(metadata_schema[field]) - - description = ( - metadata_schema[field]["description"] + " " + description_addendum - ) - # A backslash before a newline means continue a string - description = description.replace("\\\n", "") - # Two newlines should be respected - description = description.replace("\n\n", "
") - # Otherwise a newline corresponds to a space - description = description.replace("\n", " ") - - df.loc[field_name] = [requirement_info, type_string, description] - - # Print it as markdown - table_str = tabulate(df, headers="keys", tablefmt=tablefmt) - return table_str diff --git a/tools/schemacode/schemacode/tests/conftest.py b/tools/schemacode/schemacode/tests/conftest.py new file mode 100644 index 0000000000..7737987903 --- /dev/null +++ b/tools/schemacode/schemacode/tests/conftest.py @@ -0,0 +1,10 @@ +from pathlib import Path + +import pytest + + +@pytest.fixture(scope="session") +def schema_dir(): + """Path to the schema housed in the bids-specification repo.""" + bids_schema = Path(__file__).parent.parent.parent.parent.parent / "src" / "schema" + return bids_schema diff --git a/tools/schemacode/schemacode/tests/test_schema.py b/tools/schemacode/schemacode/tests/test_schema.py index 91d7de897b..f44e868729 100644 --- a/tools/schemacode/schemacode/tests/test_schema.py +++ b/tools/schemacode/schemacode/tests/test_schema.py @@ -1 +1,13 @@ -"""Tests for the schemacode package.""" \ No newline at end of file +"""Tests for the schemacode package.""" +import pytest + +from schemacode import schema + + +def test_load_schema(schema_dir): + bad_path = "/path/to/nowhere" + with pytest.raises(ValueError): + schema.load_schema(bad_path) + + schema_obj = schema.load_schema(schema_dir) + assert isinstance(schema_obj, dict) From 1a7e713e6ba67555dcd051080bef67208d1bfc6d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 15 Dec 2021 10:53:39 -0500 Subject: [PATCH 12/36] RF: Use setup.cfg over info.py --- tools/schemacode/schemacode/info.py | 84 ----------------------------- tools/schemacode/setup.cfg | 47 +++++++++++++++- tools/schemacode/setup.py | 71 ++++++------------------ 3 files changed, 62 insertions(+), 140 deletions(-) delete mode 100644 tools/schemacode/schemacode/info.py diff --git a/tools/schemacode/schemacode/info.py b/tools/schemacode/schemacode/info.py deleted file mode 100644 index 6ed687c144..0000000000 --- a/tools/schemacode/schemacode/info.py +++ /dev/null @@ -1,84 +0,0 @@ -"""Base module variables.""" -import importlib.util -import os.path as op -from pathlib import Path - -# Get version -spec = importlib.util.spec_from_file_location( - "_version", op.join(op.dirname(__file__), "schemacode/_version.py") -) -_version = importlib.util.module_from_spec(spec) -spec.loader.exec_module(_version) - -VERSION = _version.get_versions()["version"] -del _version - -# Get package description from README -# Since this file is executed from ../setup.py, the path to the README is determined by the -# location of setup.py. -readme_path = Path(__file__).parent.joinpath("README.md") -longdesc = readme_path.open().read() - -# Fields -AUTHOR = "bids-standard developers" -COPYRIGHT = "Copyright 2021, bids-standard developers" -CREDITS = "bids-standard developers" -LICENSE = "MIT" -MAINTAINER = "" -EMAIL = "" -STATUS = "Prototype" -URL = "https://github.com/bids-standard/schemacode" -PACKAGENAME = "schemacode" -DESCRIPTION = "Python tools for working with the BIDS schema." 
-LONGDESC = longdesc - -DOWNLOAD_URL = "https://github.com/bids-standard/{name}/archive/{ver}.tar.gz".format( - name=PACKAGENAME, ver=VERSION -) - -REQUIRES = [ - "numpy", - "pandas", - "tabulate", - "pyyaml", -] - -TESTS_REQUIRES = [ - "codecov", - "coverage<5.0", - "flake8>=3.7", - "flake8-black", - "flake8-isort", - "pytest", - "pytest-cov", -] - -EXTRA_REQUIRES = { - "dev": ["versioneer"], - "doc": [ - "sphinx>=1.5.3", - "sphinx_rtd_theme", - ], - "tests": TESTS_REQUIRES, -} - -ENTRY_POINTS = {} - -# Enable a handle to install all extra dependencies at once -EXTRA_REQUIRES["all"] = list(set([v for deps in EXTRA_REQUIRES.values() for v in deps])) - -# Supported Python versions using PEP 440 version specifiers -# Should match the same set of Python versions as classifiers -PYTHON_REQUIRES = ">=3.6" - -# Package classifiers -CLASSIFIERS = [ - "Development Status :: 2 - Pre-Alpha", - "Intended Audience :: Science/Research", - "Topic :: Scientific/Engineering :: Information Analysis", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", -] diff --git a/tools/schemacode/setup.cfg b/tools/schemacode/setup.cfg index ae6de3aed3..821d22c1ef 100644 --- a/tools/schemacode/setup.cfg +++ b/tools/schemacode/setup.cfg @@ -1,9 +1,54 @@ +[metadata] +name = schemacode +url = https://github.com/bids-standard/schemacode +author = bids-standard developers +author_email = bids.maintenance@gmail.com +description = Python tools for working with the BIDS schema. +long_description = file:README.md +long_description_content_type = text/markdown; charset=UTF-8; variant=GFM +license = MIT +classifiers = + Development Status :: 2 - Pre-Alpha + Intended Audience :: Science/Research + Topic :: Scientific/Engineering :: Information Analysis + License :: OSI Approved :: MIT License + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + +[options] +python_requires = >=3.6 +install_requires = + numpy + pandas + tabulate + pyyaml +packages = find: +include_package_data = False + +[options.extras_require] +doc = + sphinx>=1.5.3 + sphinx_rtd_theme +tests = + codecov + coverage<5.0 + flake8>=3.7 + flake8-black + flake8-isort + pytest + pytest-cov +all = + %(doc)s + %(tests)s + [versioneer] VCS = git style = pep440 versionfile_source = schemacode/_version.py versionfile_build = schemacode/_version.py -tag_prefix = +tag_prefix = v parentdir_prefix = [flake8] diff --git a/tools/schemacode/setup.py b/tools/schemacode/setup.py index 413a51cf28..b50d479bf2 100644 --- a/tools/schemacode/setup.py +++ b/tools/schemacode/setup.py @@ -1,57 +1,18 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- -""" schemacode setup script """ +import sys +from setuptools import setup +import versioneer + +# Give setuptools a hint to complain if it's too old a version +# 30.3.0 allows us to put most metadata in setup.cfg +# Should match pyproject.toml +SETUP_REQUIRES = ['setuptools >= 30.3.0'] +# This enables setuptools to install wheel on-the-fly +SETUP_REQUIRES += ['wheel'] if 'bdist_wheel' in sys.argv else [] + +if __name__ == '__main__': + setup(name="pybids", + version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), + setup_requires=SETUP_REQUIRES) - -def main(): - """ Install entry-point """ - import os.path as op - from inspect import currentframe, getfile - from io 
import open - - from setuptools import find_packages, setup - - import versioneer - - ver_file = op.join("schemacode", "info.py") - with open(ver_file) as f: - exec(f.read()) - vars = locals() - - root_dir = op.dirname(op.abspath(getfile(currentframe()))) - cmdclass = versioneer.get_cmdclass() - - pkg_data = { - "schemacode": [ - "tests/data/*", - ] - } - - setup( - name=vars["PACKAGENAME"], - version=vars["VERSION"], - description=vars["DESCRIPTION"], - long_description=vars["LONGDESC"], - long_description_content_type="text/markdown", - author=vars["AUTHOR"], - author_email=vars["EMAIL"], - maintainer=vars["MAINTAINER"], - maintainer_email=vars["EMAIL"], - url=vars["URL"], - license=vars["LICENSE"], - classifiers=vars["CLASSIFIERS"], - download_url=vars["DOWNLOAD_URL"], - # Dependencies handling - python_requires=vars["PYTHON_REQUIRES"], - install_requires=vars["REQUIRES"], - tests_require=vars["TESTS_REQUIRES"], - extras_require=vars["EXTRA_REQUIRES"], - entry_points=vars["ENTRY_POINTS"], - packages=find_packages(exclude=("tests",)), - zip_safe=False, - cmdclass=cmdclass, - ) - - -if __name__ == "__main__": - main() From 74b5e3197b7650b06824c7c10c239761135e439d Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 15 Dec 2021 11:12:09 -0500 Subject: [PATCH 13/36] ENH: Link schema code into package --- tools/schemacode/schemacode/data/schema | 1 + tools/schemacode/setup.cfg | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) create mode 120000 tools/schemacode/schemacode/data/schema diff --git a/tools/schemacode/schemacode/data/schema b/tools/schemacode/schemacode/data/schema new file mode 120000 index 0000000000..a06955547a --- /dev/null +++ b/tools/schemacode/schemacode/data/schema @@ -0,0 +1 @@ +../../../../src/schema \ No newline at end of file diff --git a/tools/schemacode/setup.cfg b/tools/schemacode/setup.cfg index 821d22c1ef..3d9e3419fa 100644 --- a/tools/schemacode/setup.cfg +++ b/tools/schemacode/setup.cfg @@ -43,6 +43,11 @@ all = %(doc)s %(tests)s +[options.package_data] +schemacode = + data/schema/*/*.yaml + data/schema/*/*/*.yaml + [versioneer] VCS = git style = pep440 @@ -60,8 +65,3 @@ per-file-ignores = [tool:pytest] log_cli = true - -[options.package_data] -* = - resources/* - tests/data/* From af516d8b342513ec965e6e1522250f1db20eac39 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 15 Dec 2021 11:19:34 -0500 Subject: [PATCH 14/36] RF: Use packaged schema --- tools/schemacode/schemacode/utils.py | 5 +---- tools/schemacode/setup.cfg | 3 ++- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index a2a033bf3b..3380016796 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -14,10 +14,7 @@ def get_schema_path(): str Absolute path to the folder containing schema-related files. 
""" - schema_dir = op.abspath( - op.join(op.dirname(op.dirname(op.dirname(op.dirname(__file__)))), "src", "schema") + op.sep - ) - return schema_dir + return op.abspath(op.join(op.dirname(__file__), "data", "schema")) def combine_extensions(lst): diff --git a/tools/schemacode/setup.cfg b/tools/schemacode/setup.cfg index 3d9e3419fa..59d4889d92 100644 --- a/tools/schemacode/setup.cfg +++ b/tools/schemacode/setup.cfg @@ -25,7 +25,8 @@ install_requires = tabulate pyyaml packages = find: -include_package_data = False +include_package_data = false +zip_safe = false [options.extras_require] doc = From fadc12c0e605c9345653f8fe05249d145fb58bac Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 15 Dec 2021 11:27:47 -0500 Subject: [PATCH 15/36] MNT: Require schemacode package to build docs --- requirements.txt | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 806c28a3c1..92fa48536f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,4 @@ mkdocs-material>=5.4 pymdown-extensions>=7.0.0 mkdocs-branchcustomization-plugin~=0.1.3 mkdocs-macros-plugin -numpy -pandas -PYYaml -tabulate +tools/schemacode/ From 61b13feac33762625f9df765449f8153ae6fd5f0 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Wed, 15 Dec 2021 11:58:52 -0500 Subject: [PATCH 16/36] Fix locations of functions. --- tools/mkdocs_macros_bids/macros.py | 5 ++--- tools/schemacode/pyproject.toml | 2 -- tools/schemacode/setup.py | 5 +++-- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/tools/mkdocs_macros_bids/macros.py b/tools/mkdocs_macros_bids/macros.py index d36e488d0d..6f4363fbff 100644 --- a/tools/mkdocs_macros_bids/macros.py +++ b/tools/mkdocs_macros_bids/macros.py @@ -8,7 +8,6 @@ sys.path.append(code_path) from examplecode import example -from schemacode import schema, utils def make_filename_template(**kwargs): @@ -83,7 +82,7 @@ def make_glossary(): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - text = schema.make_glossary(schema_obj) + text = render.make_glossary(schema_obj) return text @@ -153,7 +152,7 @@ def make_columns_table(column_info): """ schemapath = utils.get_schema_path() schema_obj = schema.load_schema(schemapath) - table = schema.make_columns_table(schema_obj, column_info) + table = render.make_columns_table(schema_obj, column_info) return table diff --git a/tools/schemacode/pyproject.toml b/tools/schemacode/pyproject.toml index d362784a8c..2568b7167e 100644 --- a/tools/schemacode/pyproject.toml +++ b/tools/schemacode/pyproject.toml @@ -17,9 +17,7 @@ exclude = ''' | build | dist )/ - | get_version.py | versioneer.py - | schemacode/info.py | schemacode/_version.py ) ''' diff --git a/tools/schemacode/setup.py b/tools/schemacode/setup.py index b50d479bf2..a3d81a9ce2 100644 --- a/tools/schemacode/setup.py +++ b/tools/schemacode/setup.py @@ -1,6 +1,8 @@ #!/usr/bin/env python import sys + from setuptools import setup + import versioneer # Give setuptools a hint to complain if it's too old a version @@ -11,8 +13,7 @@ SETUP_REQUIRES += ['wheel'] if 'bdist_wheel' in sys.argv else [] if __name__ == '__main__': - setup(name="pybids", + setup(name="schemacode", version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), setup_requires=SETUP_REQUIRES) - From 43c7a370f45220e71d3bd6ddc003f0057262b061 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Wed, 15 Dec 2021 13:12:00 -0500 Subject: [PATCH 17/36] Add a couple of tests. 
--- tools/schemacode/schemacode/render.py | 3 --- tools/schemacode/schemacode/tests/conftest.py | 12 +++++++++--- tools/schemacode/schemacode/tests/test_schema.py | 16 ++++++++++++++++ 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/tools/schemacode/schemacode/render.py b/tools/schemacode/schemacode/render.py index 0f78c2ef73..b4954d3321 100644 --- a/tools/schemacode/schemacode/render.py +++ b/tools/schemacode/schemacode/render.py @@ -8,9 +8,6 @@ from . import utils from .schema import filter_schema -# import utils - - lgr = utils.get_logger() # Basic settings for output, for now just basic utils.set_logger_level( diff --git a/tools/schemacode/schemacode/tests/conftest.py b/tools/schemacode/schemacode/tests/conftest.py index 7737987903..5bc4f2dc9e 100644 --- a/tools/schemacode/schemacode/tests/conftest.py +++ b/tools/schemacode/schemacode/tests/conftest.py @@ -1,10 +1,16 @@ -from pathlib import Path - import pytest +from schemacode import schema, utils + @pytest.fixture(scope="session") def schema_dir(): """Path to the schema housed in the bids-specification repo.""" - bids_schema = Path(__file__).parent.parent.parent.parent.parent / "src" / "schema" + bids_schema = utils.get_schema_path() return bids_schema + + +@pytest.fixture(scope="session") +def schema_obj(schema_dir): + """Schema object.""" + return schema.load_schema(schema_dir) diff --git a/tools/schemacode/schemacode/tests/test_schema.py b/tools/schemacode/schemacode/tests/test_schema.py index f44e868729..5d1400a778 100644 --- a/tools/schemacode/schemacode/tests/test_schema.py +++ b/tools/schemacode/schemacode/tests/test_schema.py @@ -5,9 +5,25 @@ def test_load_schema(schema_dir): + """Smoke test for schemacode.schema.load_schema.""" + # Pointing to a nonexistent folder should raise a ValueError bad_path = "/path/to/nowhere" with pytest.raises(ValueError): schema.load_schema(bad_path) + # Otherwise the function should return a dictionary schema_obj = schema.load_schema(schema_dir) assert isinstance(schema_obj, dict) + + +def test_object_definitions(schema_obj): + """Ensure that object definitions in the schema contain required fields.""" + for obj_type, obj_type_def in schema_obj["objects"].items(): + for obj_key, obj_def in obj_type_def.items(): + # Private/inheritable definitions (ones starting with "_") do not need to conform to + # the same rules as user-facing terms, so we skip them + if obj_key.startswith("_"): + continue + + assert "name" in obj_def.keys(), obj_key + assert "description" in obj_def.keys(), obj_key From 3b54a6123da0d1f5032e149adf0b3c48dde3aca6 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Wed, 15 Dec 2021 13:26:35 -0500 Subject: [PATCH 18/36] Draft a couple more tests. 
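For reference, `test_object_definitions` above only asserts that every public object definition carries `name` and `description` keys; a sketch of a definition that would pass (the term itself is hypothetical, not taken from the real schema):

```python
# Hypothetical entries in schema_obj["objects"]["metadata"]; keys starting with "_"
# are treated as private/inheritable and are skipped by test_object_definitions.
example_objects = {
    "ExampleField": {
        "name": "ExampleField",
        "description": "A made-up metadata term, used only to illustrate the required keys.",
    },
    "_hidden": {
        "description": "Private definitions are not checked for a 'name' key.",
    },
}
```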
--- .../schemacode/schemacode/tests/test_utils.py | 30 +++++++++++++++++++ tools/schemacode/schemacode/utils.py | 13 +++++++- 2 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 tools/schemacode/schemacode/tests/test_utils.py diff --git a/tools/schemacode/schemacode/tests/test_utils.py b/tools/schemacode/schemacode/tests/test_utils.py new file mode 100644 index 0000000000..dc2773e8c6 --- /dev/null +++ b/tools/schemacode/schemacode/tests/test_utils.py @@ -0,0 +1,30 @@ +from schemacode import utils + + +def test_combine_extensions(): + """A unit test for utils.combine_extensions.""" + test_extensions = ["nii.gz", "nii", "json"] + target_combined = ["nii[.gz]", "json"] + test_combined = utils.combine_extensions(test_extensions) + assert test_combined == target_combined + + +def test_resolve_metadata_type(): + """A unit test for utils.resolve_metadata_type.""" + base_definition = { + "name": "Term", + "description": "A description", + } + + # Basic string + term_definition1 = base_definition.copy() + term_definition1["type"] = "string" + target_description = "[string](https://www.w3schools.com/js/js_json_datatypes.asp)" + type_description = utils.resolve_metadata_type(term_definition1) + assert target_description == type_description + + # When n/a is the only allowed value, the type should say "n/a" + term_definition1["enum"] = ["n/a"] + target_description = '`"n/a"`' + type_description = utils.resolve_metadata_type(term_definition1) + assert target_description == type_description diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index 3380016796..d593b9ff3c 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -151,7 +151,18 @@ def get_link(string): def resolve_metadata_type(definition): - """Generate string of metadata type from dictionary.""" + """Generate string of metadata type from dictionary. + + Parameters + ---------- + definition : :obj:`dict` + A schema object definition for a metadata term. + + Returns + ------- + string : :obj:`str` + A string describing the valid value types for the metadata term. + """ if "type" in definition.keys(): string = get_link(definition["type"]) From 376fbcb382349370b004b3e4ca68af9e23be5698 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 10:43:50 -0500 Subject: [PATCH 19/36] Draft Action for schemacode CI. I based this one off of a similar Action in nilearn. It'll probably require some debugging... 
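For reference, a small sketch of the two helpers exercised by the unit tests above; the expected values mirror the test assertions:

```python
from schemacode import utils

# combine_extensions collapses extensions sharing a stem (see test_combine_extensions).
combined = utils.combine_extensions(["nii.gz", "nii", "json"])
assert combined == ["nii[.gz]", "json"]

# resolve_metadata_type turns a term definition into a linked type string
# (see test_resolve_metadata_type).
definition = {"name": "Term", "description": "A description", "type": "string"}
assert utils.resolve_metadata_type(definition) == (
    "[string](https://www.w3schools.com/js/js_json_datatypes.asp)"
)
```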
--- .github/workflows/schemacode_ci.yml | 64 +++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 .github/workflows/schemacode_ci.yml diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml new file mode 100644 index 0000000000..563ef094dd --- /dev/null +++ b/.github/workflows/schemacode_ci.yml @@ -0,0 +1,64 @@ +name: "schemacode_ci" + +on: + push: + branches: + - "master" + pull_request: + branches: + - "*" + +jobs: + check_skip: + runs-on: ubuntu-latest + outputs: + skip: ${{ steps.result_step.outputs.ci-skip }} + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - id: result_step + uses: mstachniuk/ci-skip@master + with: + commit-filter: "[skip ci];[ci skip];[skip github]" + commit-filter-separator: ";" + + latest: + needs: check_skip + if: ${{ needs.check_skip.outputs.skip == 'false' }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest"] + python-version: ["3.8"] + name: ${{ matrix.os }} with Python ${{ matrix.python-version }} + defaults: + run: + shell: bash + steps: + - uses: actions/checkout@v2 + + - name: "Set up Python" + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: "Display Python version" + shell: bash {0} + run: python -c "import sys; print(sys.version)" + + - name: "Install the schemacode package" + shell: bash {0} + run: | + python -m pip install --progress-bar off --upgrade pip setuptools wheel flake8 + python -m pip install ./tools/schemacode/ + + - name: "Run tests" + shell: bash {0} + run: | + python -m pytest --pyargs schemacode --cov=schemacode ./tools/schemacode/ + + - name: "Upload coverage to CodeCov" + uses: codecov/codecov-action@v1 + if: success() From 1332c9e0304d391f8822be93cae234b81ea3f312 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 10:46:33 -0500 Subject: [PATCH 20/36] Fix installation. --- .github/workflows/schemacode_ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index 563ef094dd..667e22f066 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -51,8 +51,8 @@ jobs: - name: "Install the schemacode package" shell: bash {0} run: | - python -m pip install --progress-bar off --upgrade pip setuptools wheel flake8 - python -m pip install ./tools/schemacode/ + python -m pip install --progress-bar off --upgrade pip setuptools wheel + python -m pip install ./tools/schemacode[tests] - name: "Run tests" shell: bash {0} From 78581065d4394c0fe0d71461ac3ab14f47df11c3 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 10:50:38 -0500 Subject: [PATCH 21/36] Update schemacode_ci.yml --- .github/workflows/schemacode_ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index 667e22f066..e654a2218e 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -52,7 +52,7 @@ jobs: shell: bash {0} run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel - python -m pip install ./tools/schemacode[tests] + python -m pip install -e ./tools/schemacode[tests] - name: "Run tests" shell: bash {0} From 2e48e7d8793cc94790fa029abd5bc0c354596691 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 10:54:54 -0500 Subject: [PATCH 22/36] Add badge. 
--- tools/schemacode/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/schemacode/README.md b/tools/schemacode/README.md index e2b682a800..0781fd5e8d 100644 --- a/tools/schemacode/README.md +++ b/tools/schemacode/README.md @@ -1,3 +1,5 @@ # schemacode A Python library for working with the BIDS schema. + +[![codecov](https://codecov.io/gh/bids-standard/bids-specification/branch/master/graph/badge.svg)](https://codecov.io/gh/bids-standard/bids-specification) From 963ad6171a05436e8f64bb00a7179de305d449da Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 11:16:37 -0500 Subject: [PATCH 23/36] Add schema-related files to the CODEOWNERS file. --- CODEOWNERS | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CODEOWNERS b/CODEOWNERS index 5dbad3bf60..24cc697363 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -18,3 +18,7 @@ /src/05-derivatives/05-functional-derivatives.md @effigies /src/05-derivatives/06-diffusion-derivatives.md @francopestilli @oesteban @Lestropie /src/99-appendices/06-meg-file-formats.md @monkeyman192 + +# The schema +/src/schema/ @tsalo +/tools/schemacode/ @tsalo From 31bcd196b601481b73c5c6f1101d9f60cdb37fc9 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 14:21:39 -0500 Subject: [PATCH 24/36] Run versioneer 0.21 instead of 0.18 and run black. --- .gitattributes | 1 + tools/schemacode/MANIFEST.in | 1 + tools/schemacode/schemacode/__init__.py | 4 + tools/schemacode/schemacode/_version.py | 368 ++++++---- tools/schemacode/schemacode/render.py | 26 +- tools/schemacode/schemacode/schema.py | 8 +- tools/schemacode/schemacode/utils.py | 7 +- tools/schemacode/setup.py | 16 +- tools/schemacode/versioneer.py | 935 +++++++++++++++--------- 9 files changed, 833 insertions(+), 533 deletions(-) diff --git a/.gitattributes b/.gitattributes index 2bd791b40d..d2563f43d1 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,3 +2,4 @@ *.png -text *.jpg -text *.webm -text +tools/schemacode/schemacode/_version.py export-subst diff --git a/tools/schemacode/MANIFEST.in b/tools/schemacode/MANIFEST.in index a24694ff41..c67a6ea54b 100644 --- a/tools/schemacode/MANIFEST.in +++ b/tools/schemacode/MANIFEST.in @@ -1 +1,2 @@ include versioneer.py +include schemacode/_version.py diff --git a/tools/schemacode/schemacode/__init__.py b/tools/schemacode/schemacode/__init__.py index d01d8a33d3..a00379421a 100644 --- a/tools/schemacode/schemacode/__init__.py +++ b/tools/schemacode/schemacode/__init__.py @@ -6,3 +6,7 @@ "schema", "utils", ] + +from . import _version + +__version__ = _version.get_versions()["version"] diff --git a/tools/schemacode/schemacode/_version.py b/tools/schemacode/schemacode/_version.py index 8c0102e67f..537930fbb8 100644 --- a/tools/schemacode/schemacode/_version.py +++ b/tools/schemacode/schemacode/_version.py @@ -1,3 +1,4 @@ + # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -5,7 +6,7 @@ # that just contains the computed version number. # This file is released into the public domain. 
Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) +# versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" @@ -14,6 +15,7 @@ import re import subprocess import sys +from typing import Callable, Dict def get_keywords(): @@ -40,7 +42,7 @@ def get_config(): cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" - cfg.tag_prefix = "" + cfg.tag_prefix = "v" cfg.parentdir_prefix = "" cfg.versionfile_source = "schemacode/_version.py" cfg.verbose = False @@ -51,40 +53,36 @@ class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" -LONG_VERSION_PY = {} -HANDLERS = {} +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f - return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -96,15 +94,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): @@ -116,25 +112,18 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start 
with parentdir_prefix") @@ -147,22 +136,21 @@ def git_get_keywords(versionfile_abs): # _version.py. keywords = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @@ -170,10 +158,14 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -186,11 +178,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -199,7 +191,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -207,30 +199,28 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -238,10 +228,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): version string, meaning we're inside a checked out source tree. """ GITS = ["git"] + TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -249,24 +242,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) + describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)], + cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() @@ -276,6 +261,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. 
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -284,16 +302,17 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] + git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) return pieces # tag @@ -302,12 +321,10 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] + pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -318,11 +335,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -353,25 +373,74 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 
0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered @@ -402,12 +471,41 @@ def render_pep440_post(pieces): return rendered +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 
0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -467,23 +565,25 @@ def render_git_describe_long(pieces): def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -493,13 +593,9 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} def get_versions(): @@ -513,7 +609,8 @@ def get_versions(): verbose = cfg.verbose try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) except NotThisMethod: pass @@ -522,16 +619,13 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
- for i in cfg.versionfile_source.split("/"): + for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) @@ -545,10 +639,6 @@ def get_versions(): except NotThisMethod: pass - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/tools/schemacode/schemacode/render.py b/tools/schemacode/schemacode/render.py index b4954d3321..47f44a2113 100644 --- a/tools/schemacode/schemacode/render.py +++ b/tools/schemacode/schemacode/render.py @@ -10,9 +10,7 @@ lgr = utils.get_logger() # Basic settings for output, for now just basic -utils.set_logger_level( - lgr, os.environ.get("BIDS_SCHEMA_LOG_LEVEL", logging.INFO) -) +utils.set_logger_level(lgr, os.environ.get("BIDS_SCHEMA_LOG_LEVEL", logging.INFO)) logging.basicConfig(format="%(asctime)-15s [%(levelname)8s] %(message)s") @@ -227,16 +225,12 @@ def make_filename_template(schema, **kwargs): string += suffix strings = [string] else: - strings = [ - string + "_" + suffix for suffix in group["suffixes"] - ] + strings = [string + "_" + suffix for suffix in group["suffixes"]] # Add extensions full_strings = [] extensions = group["extensions"] - extensions = [ - ext if ext != "*" else "." for ext in extensions - ] + extensions = [ext if ext != "*" else "." 
for ext in extensions] extensions = utils.combine_extensions(extensions) if len(extensions) > 5: # Combine exts when there are many, but keep JSON separate @@ -426,11 +420,7 @@ def make_suffix_table(schema, suffixes, tablefmt="github"): suffixes_found = [f for f in suffixes if f in suffix_schema.keys()] suffixes_not_found = [f for f in suffixes if f not in suffix_schema.keys()] if suffixes_not_found: - raise Exception( - "Warning: Missing suffixes: {}".format( - ", ".join(suffixes_not_found) - ) - ) + raise Exception("Warning: Missing suffixes: {}".format(", ".join(suffixes_not_found))) df = pd.DataFrame( index=suffixes_found, @@ -513,9 +503,7 @@ def make_metadata_table(schema, field_info, tablefmt="github"): type_string = utils.resolve_metadata_type(metadata_schema[field]) - description = ( - metadata_schema[field]["description"] + " " + description_addendum - ) + description = metadata_schema[field]["description"] + " " + description_addendum # A backslash before a newline means continue a string description = description.replace("\\\n", "") # Two newlines should be respected @@ -584,9 +572,7 @@ def make_columns_table(schema, column_info, tablefmt="github"): type_string = utils.resolve_metadata_type(column_schema[field]) - description = ( - column_schema[field]["description"] + " " + description_addendum - ) + description = column_schema[field]["description"] + " " + description_addendum # A backslash before a newline means continue a string description = description.replace("\\\n", "") # Two newlines should be respected diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index efb976e491..805a4f5177 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -12,9 +12,7 @@ lgr = utils.get_logger() # Basic settings for output, for now just basic -utils.set_logger_level( - lgr, os.environ.get("BIDS_SCHEMA_LOG_LEVEL", logging.INFO) -) +utils.set_logger_level(lgr, os.environ.get("BIDS_SCHEMA_LOG_LEVEL", logging.INFO)) logging.basicConfig(format="%(asctime)-15s [%(levelname)8s] %(message)s") @@ -151,9 +149,7 @@ def filter_schema(schema, **kwargs): if k in new_schema.keys(): filtered_item = deepcopy(new_schema[k]) if isinstance(filtered_item, dict): - filtered_item = { - k1: v1 for k1, v1 in filtered_item.items() if k1 in v - } + filtered_item = {k1: v1 for k1, v1 in filtered_item.items() if k1 in v} else: filtered_item = [i for i in filtered_item if i in v] new_schema[k] = filtered_item diff --git a/tools/schemacode/schemacode/utils.py b/tools/schemacode/schemacode/utils.py index d593b9ff3c..3b25b2cd61 100644 --- a/tools/schemacode/schemacode/utils.py +++ b/tools/schemacode/schemacode/utils.py @@ -176,14 +176,11 @@ def resolve_metadata_type(definition): elif "type" in definition.get("additionalProperties", {}): # Values within objects - string += " of " + get_link( - definition["additionalProperties"]["type"] + "s" - ) + string += " of " + get_link(definition["additionalProperties"]["type"] + "s") elif "anyOf" in definition: # Use dictionary to get unique substrings while preserving insertion order - substrings = {resolve_metadata_type(subdict): None - for subdict in definition["anyOf"]} + substrings = {resolve_metadata_type(subdict): None for subdict in definition["anyOf"]} string = " or ".join(substrings) diff --git a/tools/schemacode/setup.py b/tools/schemacode/setup.py index a3d81a9ce2..26ade778c2 100644 --- a/tools/schemacode/setup.py +++ b/tools/schemacode/setup.py @@ -8,12 +8,14 @@ # Give setuptools a hint to 
complain if it's too old a version # 30.3.0 allows us to put most metadata in setup.cfg # Should match pyproject.toml -SETUP_REQUIRES = ['setuptools >= 30.3.0'] +SETUP_REQUIRES = ["setuptools >= 30.3.0"] # This enables setuptools to install wheel on-the-fly -SETUP_REQUIRES += ['wheel'] if 'bdist_wheel' in sys.argv else [] +SETUP_REQUIRES += ["wheel"] if "bdist_wheel" in sys.argv else [] -if __name__ == '__main__': - setup(name="schemacode", - version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), - setup_requires=SETUP_REQUIRES) +if __name__ == "__main__": + setup( + name="schemacode", + version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), + setup_requires=SETUP_REQUIRES, + ) diff --git a/tools/schemacode/versioneer.py b/tools/schemacode/versioneer.py index 323046a244..b4cd1d6c7c 100644 --- a/tools/schemacode/versioneer.py +++ b/tools/schemacode/versioneer.py @@ -1,4 +1,5 @@ -# Version: 0.18 + +# Version: 0.21 """The Versioneer - like a rocketeer, but for versions. @@ -6,16 +7,12 @@ ============== * like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer +* https://github.com/python-versioneer/python-versioneer * Brian Warner * License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) +* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update @@ -26,9 +23,10 @@ ## Quick Install -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) +* `pip install versioneer` to somewhere in your $PATH +* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) * run `versioneer install` in your source tree, commit the results +* Verify version information with `python setup.py version` ## Version Identifiers @@ -60,7 +58,7 @@ for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. +uncommitted changes). The version identifier is used for multiple purposes: @@ -165,7 +163,7 @@ Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). +[issues page](https://github.com/python-versioneer/python-versioneer/issues). ### Subprojects @@ -193,9 +191,9 @@ Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking this issue. The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. 
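(As a minimal illustration, not part of the patch: the version identifiers discussed earlier in this docstring come straight from `git describe`, and the sketch below splits a sample string such as "0.7-1-g574ab98-dirty" into the same TAG-NUM-gHEX[-dirty] pieces that git_pieces_from_vcs() parses later in this file. The sample value is assumed; the regex is the one versioneer uses.)

import re

describe_out = "0.7-1-g574ab98-dirty"       # sample `git describe --tags --dirty --long` output
dirty = describe_out.endswith("-dirty")     # trailing "-dirty" marks uncommitted changes
if dirty:
    describe_out = describe_out[: describe_out.rindex("-dirty")]
mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", describe_out)
closest_tag = mo.group(1)                   # "0.7": nearest reachable tag
distance = int(mo.group(2))                 # 1: commits since that tag
short = mo.group(3)                         # "574ab98": abbreviated revision id
print(closest_tag, distance, short, dirty)  # 0.7 1 574ab98 True
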
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve @@ -223,22 +221,10 @@ cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. -[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. - ## Updating Versioneer @@ -264,6 +250,14 @@ direction and include code from all supported VCS systems, reducing the number of intermediate scripts. +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer +* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools + plugin ## License @@ -273,21 +267,27 @@ Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . -""" - -from __future__ import print_function +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer -try: - import configparser -except ImportError: - import ConfigParser as configparser +""" +# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring +# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements +# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error +# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with +# pylint:disable=attribute-defined-outside-init,too-many-arguments +import configparser import errno import json import os import re import subprocess import sys +from typing import Callable, Dict class VersioneerConfig: @@ -309,13 +309,11 @@ def get_root(): setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ( - "Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND')." - ) + err = ("Versioneer was unable to run the project root directory. 
" + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools @@ -324,14 +322,12 @@ def get_root(): # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. - me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) + my_path = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(my_path)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: - print( - "Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py) - ) + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(my_path), versioneer_py)) except NameError: pass return root @@ -339,31 +335,29 @@ def get_root(): def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or + # This might raise OSError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) + parser = configparser.ConfigParser() + with open(setup_cfg, "r") as cfg_file: + parser.read_file(cfg_file) VCS = parser.get("versioneer", "VCS") # mandatory - def get(parser, name): - if parser.has_option("versioneer", name): - return parser.get("versioneer", name) - return None + # Dict-like interface for non-mandatory entries + section = parser["versioneer"] cfg = VersioneerConfig() cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") + cfg.style = section.get("style", "") + cfg.versionfile_source = section.get("versionfile_source") + cfg.versionfile_build = section.get("versionfile_build") + cfg.tag_prefix = section.get("tag_prefix") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") + cfg.parentdir_prefix = section.get("parentdir_prefix") + cfg.verbose = section.get("verbose") return cfg @@ -372,40 +366,34 @@ class NotThisMethod(Exception): # these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f + HANDLERS.setdefault(vcs, {})[method] = f return f - return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): +def run_command(commands, args, 
cwd=None, verbose=False, hide_stderr=False, + env=None): """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -417,20 +405,16 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode -LONG_VERSION_PY[ - "git" -] = ''' +LONG_VERSION_PY['git'] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -438,7 +422,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= # that just contains the computed version number. # This file is released into the public domain. 
Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) +# versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) """Git implementation of _version.py.""" @@ -447,6 +431,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= import re import subprocess import sys +from typing import Callable, Dict def get_keywords(): @@ -484,12 +469,12 @@ class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" -LONG_VERSION_PY = {} -HANDLERS = {} +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" + """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: @@ -503,17 +488,17 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) break - except EnvironmentError: + except OSError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue @@ -525,15 +510,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): @@ -545,15 +528,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% @@ -570,22 +552,21 @@ def git_get_keywords(versionfile_abs): # _version.py. 
keywords = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @@ -593,10 +574,14 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -609,11 +594,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d @@ -622,7 +607,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: @@ -631,6 +616,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %%s" %% r) return {"version": r, @@ -646,7 +636,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -654,11 +644,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): version string, meaning we're inside a checked out source tree. """ GITS = ["git"] + TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) @@ -666,15 +658,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) + describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", + "%%s%%s" %% (tag_prefix, TAG_PREFIX_REGEX)], + cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() @@ -684,6 +677,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. 
It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -725,13 +751,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() + date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -769,19 +796,67 @@ def render_pep440(pieces): return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) + else: + rendered += ".post0.dev%%d" %% (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] + rendered = "0.post0.dev%%d" %% pieces["distance"] return rendered @@ -812,12 +887,41 @@ def render_pep440_post(pieces): return rendered +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -888,10 +992,14 @@ def render(pieces, style): if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -927,7 +1035,7 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. - for i in cfg.versionfile_source.split('/'): + for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, @@ -962,22 +1070,21 @@ def git_get_keywords(versionfile_abs): # _version.py. keywords = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @@ -985,10 +1092,14 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -1001,11 +1112,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -1014,7 +1125,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -1022,30 +1133,28 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -1053,10 +1162,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] + TAG_PREFIX_REGEX = "*" if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] + TAG_PREFIX_REGEX = r"\*" - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -1064,24 +1176,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) + describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)], + cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() @@ -1091,6 +1195,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -1099,16 +1236,17 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] + git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) return pieces # tag @@ -1117,12 +1255,10 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] + pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -1133,13 +1269,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -1158,27 +1295,26 @@ def do_vcs_install(manifest_in, versionfile_source, ipy): if ipy: files.append(ipy) try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) + my_path = __file__ + if my_path.endswith(".pyc") or my_path.endswith(".pyo"): + my_path = os.path.splitext(my_path)[0] + ".py" + versioneer_file = os.path.relpath(my_path) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: + with open(".gitattributes", "r") as fobj: + for line in fobj: + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + break + except OSError: pass if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() + with open(".gitattributes", "a+") as fobj: + fobj.write(f"{versionfile_source} export-subst\n") files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) @@ -1192,30 +1328,23 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: - print( - "Tried directories %s but none started with prefix %s" - % 
(str(rootdirs), parentdir_prefix) - ) + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from +# This file was generated by 'versioneer.py' (0.21) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. @@ -1237,15 +1366,13 @@ def versions_from_file(filename): try: with open(filename) as f: contents = f.read() - except EnvironmentError: + except OSError: raise NotThisMethod("unable to read _version.py") - mo = re.search( - r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) if not mo: - mo = re.search( - r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) @@ -1254,7 +1381,8 @@ def versions_from_file(filename): def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) @@ -1286,25 +1414,74 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 
0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered @@ -1335,12 +1512,41 @@ def render_pep440_post(pieces): return rendered +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -1400,23 +1606,25 @@ def render_git_describe_long(pieces): def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -1426,13 +1634,9 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} class VersioneerBadRootError(Exception): @@ -1455,9 +1659,8 @@ def get_versions(verbose=False): handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose - assert ( - cfg.versionfile_source is not None - ), "please set versioneer.versionfile_source" + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) @@ -1511,13 +1714,9 @@ 
def get_versions(verbose=False): if verbose: print("unable to compute version") - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} def get_version(): @@ -1525,8 +1724,12 @@ def get_version(): return get_versions()["version"] -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" +def get_cmdclass(cmdclass=None): + """Get the custom setuptools/distutils subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. + """ if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and @@ -1540,9 +1743,9 @@ def get_cmdclass(): # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/warner/python-versioneer/issues/52 + # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - cmds = {} + cmds = {} if cmdclass is None else cmdclass.copy() # we add "version" to both distutils and setuptools from distutils.core import Command @@ -1566,7 +1769,6 @@ def run(self): print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools @@ -1585,7 +1787,9 @@ def run(self): # setup.py egg_info -> ? # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: + if 'build_py' in cmds: + _build_py = cmds['build_py'] + elif "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py @@ -1599,15 +1803,41 @@ def run(self): # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py + if 'build_ext' in cmds: + _build_ext = cmds['build_ext'] + elif "setuptools" in sys.modules: + from setuptools.command.build_ext import build_ext as _build_ext + else: + from distutils.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_ext"] = cmd_build_ext + if "cx_Freeze" in sys.modules: # cx_freeze enabled? 
from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ @@ -1628,25 +1858,18 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] - if "py2exe" in sys.modules: # py2exe enabled? - try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 + if 'py2exe' in sys.modules: # py2exe enabled? + from py2exe.distutils_buildexe import py2exe as _py2exe class cmd_py2exe(_py2exe): def run(self): @@ -1661,21 +1884,19 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: + if 'sdist' in cmds: + _sdist = cmds['sdist'] + elif "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist @@ -1698,10 +1919,8 @@ def make_release_tree(self, base_dir, files): # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) - write_to_version_file( - target_versionfile, self._versioneer_generated_versions - ) - + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds @@ -1744,25 +1963,28 @@ def make_release_tree(self, base_dir, files): """ -INIT_PY_SNIPPET = """ +OLD_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ +INIT_PY_SNIPPET = """ +from . 
import {0} +__version__ = {0}.get_versions()['version'] +""" + def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" + """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) - except ( - EnvironmentError, - configparser.NoSectionError, - configparser.NoOptionError, - ) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", file=sys.stderr) + except (OSError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (OSError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) @@ -1771,28 +1993,31 @@ def do_setup(): print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() - except EnvironmentError: + except OSError: old = "" - if INIT_PY_SNIPPET not in old: + module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] + snippet = INIT_PY_SNIPPET.format(module) + if OLD_SNIPPET in old: + print(" replacing boilerplate in %s" % ipy) + with open(ipy, "w") as f: + f.write(old.replace(OLD_SNIPPET, snippet)) + elif snippet not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) + f.write(snippet) else: print(" %s unmodified" % ipy) else: @@ -1811,7 +2036,7 @@ def do_setup(): if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) - except EnvironmentError: + except OSError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so @@ -1824,10 +2049,8 @@ def do_setup(): else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: - print( - " appending versionfile_source ('%s') to MANIFEST.in" - % cfg.versionfile_source - ) + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: From f766804af5cc5d30d9b5e2dc84e6716f27f60934 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Thu, 16 Dec 2021 14:36:34 -0500 Subject: [PATCH 25/36] Add numpy back in as a general dependency. 
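The OLD_SNIPPET/INIT_PY_SNIPPET change in the preceding patch swaps the boilerplate that `versioneer install` writes into a package's __init__.py. As a small sketch (not part of the patch), rendering the new template for a version file named _version.py, the module name do_setup() would derive for this package, gives:

INIT_PY_SNIPPET = """
from . import {0}
__version__ = {0}.get_versions()['version']
"""

# Rendered snippet for a version file named _version.py:
print(INIT_PY_SNIPPET.format("_version"))
# from . import _version
# __version__ = _version.get_versions()['version']
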
--- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 92fa48536f..edc1538e9f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,5 @@ mkdocs-material>=5.4 pymdown-extensions>=7.0.0 mkdocs-branchcustomization-plugin~=0.1.3 mkdocs-macros-plugin +numpy tools/schemacode/ From 181df00a3510c8a21d848679dca2e4a8d23b0d38 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 17 Dec 2021 19:10:49 -0500 Subject: [PATCH 26/36] Apply suggestions from code review Co-authored-by: Yaroslav Halchenko --- .github/workflows/schemacode_ci.yml | 5 ++++- tools/schemacode/setup.cfg | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index e654a2218e..edc003dc62 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -2,6 +2,9 @@ name: "schemacode_ci" on: push: + paths: + - 'tools/schemacode/**' + - 'src/schema/**' branches: - "master" pull_request: @@ -31,7 +34,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest"] - python-version: ["3.8"] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] name: ${{ matrix.os }} with Python ${{ matrix.python-version }} defaults: run: diff --git a/tools/schemacode/setup.cfg b/tools/schemacode/setup.cfg index 59d4889d92..2d42c7a46f 100644 --- a/tools/schemacode/setup.cfg +++ b/tools/schemacode/setup.cfg @@ -16,6 +16,7 @@ classifiers = Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 [options] python_requires = >=3.6 From 2557616c52fa8499571e4a7b3c8c0b4e534c282a Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 17 Dec 2021 19:17:41 -0500 Subject: [PATCH 27/36] Add path restriction to pull requests too. --- .github/workflows/schemacode_ci.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index edc003dc62..d883519ee4 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -2,14 +2,17 @@ name: "schemacode_ci" on: push: - paths: - - 'tools/schemacode/**' - - 'src/schema/**' branches: - "master" + paths: + - "tools/schemacode/**" + - "src/schema/**" pull_request: branches: - "*" + paths: + - "tools/schemacode/**" + - "src/schema/**" jobs: check_skip: From e19f1031bca9736e457a4bc36f10b6fab57e72e3 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Fri, 17 Dec 2021 19:44:51 -0500 Subject: [PATCH 28/36] Add license badge. --- tools/schemacode/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/schemacode/README.md b/tools/schemacode/README.md index 0781fd5e8d..ad8cccb51d 100644 --- a/tools/schemacode/README.md +++ b/tools/schemacode/README.md @@ -2,4 +2,5 @@ A Python library for working with the BIDS schema. +[![License](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) [![codecov](https://codecov.io/gh/bids-standard/bids-specification/branch/master/graph/badge.svg)](https://codecov.io/gh/bids-standard/bids-specification) From cc5ed8e3dd12398fd261db00c5615cd8ebc7d242 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 18 Dec 2021 11:11:03 -0500 Subject: [PATCH 29/36] Try out linting job. 
--- .github/workflows/schemacode_ci.yml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index d883519ee4..e830c167e2 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -29,7 +29,7 @@ jobs: commit-filter: "[skip ci];[ci skip];[skip github]" commit-filter-separator: ";" - latest: + run_tests: needs: check_skip if: ${{ needs.check_skip.outputs.skip == 'false' }} runs-on: ${{ matrix.os }} @@ -68,3 +68,18 @@ jobs: - name: "Upload coverage to CodeCov" uses: codecov/codecov-action@v1 if: success() + + flake8-lint: + runs-on: ubuntu-latest + name: Lint + steps: + - name: Check out source repository + uses: actions/checkout@v2 + - name: Set up Python environment + uses: actions/setup-python@v1 + with: + python-version: "3.8" + - name: Flake8 Lint + uses: py-actions/flake8@v2 + with: + path: "tools/schemacode/" From a71a323f25135ba3640066c74d3629298bd308a3 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 18 Dec 2021 11:11:30 -0500 Subject: [PATCH 30/36] Fix name. --- .github/workflows/schemacode_ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index e830c167e2..37e5fc965a 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -71,7 +71,7 @@ jobs: flake8-lint: runs-on: ubuntu-latest - name: Lint + name: Lint schemacode steps: - name: Check out source repository uses: actions/checkout@v2 From ce53a66197808a5d178ccd2de963f8b890bb0512 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 18 Dec 2021 11:16:16 -0500 Subject: [PATCH 31/36] Don't use the flake8 action. --- .github/workflows/schemacode_ci.yml | 20 ++++++++++++++------ tools/schemacode/setup.cfg | 7 ++++--- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index 37e5fc965a..8779e7a653 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -75,11 +75,19 @@ jobs: steps: - name: Check out source repository uses: actions/checkout@v2 + - name: Set up Python environment - uses: actions/setup-python@v1 - with: - python-version: "3.8" - - name: Flake8 Lint - uses: py-actions/flake8@v2 + uses: actions/setup-python@v2 with: - path: "tools/schemacode/" + python-version: "3.7" + + - name: "Install the schemacode package" + shell: bash {0} + run: | + python -m pip install --progress-bar off --upgrade pip setuptools wheel + python -m pip install -e ./tools/schemacode[tests] + + - name: "Run flake8" + shell: bash {0} + run: | + flake8 ./tools/schemacode/ diff --git a/tools/schemacode/setup.cfg b/tools/schemacode/setup.cfg index 2d42c7a46f..5b8b09a0f0 100644 --- a/tools/schemacode/setup.cfg +++ b/tools/schemacode/setup.cfg @@ -60,10 +60,11 @@ parentdir_prefix = [flake8] max-line-length = 99 -exclude=*build/ -ignore = E203,E402,W503 +exclude = *build/ +ignore = E203,E402,E722,W503 per-file-ignores = - */__init__.py:F401 + */__init__.py : F401 +docstring-convention = numpy [tool:pytest] log_cli = true From 712a1f624f4bf7d04713240cec2204780c9d22ce Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 18 Dec 2021 11:20:33 -0500 Subject: [PATCH 32/36] Change working directory to use config. 
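Running flake8 from inside tools/schemacode/ rather than pointing it at that directory from the repository root lets it pick up the [flake8] section of the package's setup.cfg (line length, excludes, per-file ignores). The local equivalent is simply (a sketch, starting from the repository root):

    cd tools/schemacode
    flake8 .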
--- .github/workflows/schemacode_ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/schemacode_ci.yml b/.github/workflows/schemacode_ci.yml index 8779e7a653..51f0334e5a 100644 --- a/.github/workflows/schemacode_ci.yml +++ b/.github/workflows/schemacode_ci.yml @@ -88,6 +88,7 @@ jobs: python -m pip install -e ./tools/schemacode[tests] - name: "Run flake8" + working-directory: ./tools/schemacode/ shell: bash {0} run: | - flake8 ./tools/schemacode/ + flake8 . From cf9b6b7b5d483d2cb4e9e36f3d81c47e7460648d Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 18 Dec 2021 11:23:59 -0500 Subject: [PATCH 33/36] Run black on versioneer files. --- tools/schemacode/schemacode/_version.py | 154 ++++++++------ tools/schemacode/versioneer.py | 266 ++++++++++++++---------- 2 files changed, 245 insertions(+), 175 deletions(-) diff --git a/tools/schemacode/schemacode/_version.py b/tools/schemacode/schemacode/_version.py index 537930fbb8..773b0f3174 100644 --- a/tools/schemacode/schemacode/_version.py +++ b/tools/schemacode/schemacode/_version.py @@ -1,4 +1,3 @@ - # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -59,17 +58,18 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f + return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) process = None @@ -77,10 +77,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, try: dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) break except OSError: e = sys.exc_info()[1] @@ -115,15 +118,21 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @@ -182,7 +191,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -191,7 +200,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} + tags = {r for r in refs if re.search(r"\d", r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -199,24 +208,31 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] + r = ref[len(tag_prefix) :] # Filter out refs that exactly match prefix or that don't start # with a number once the prefix is stripped (mostly a concern # when prefix is '') - if not re.match(r'\d', r): + if not re.match(r"\d", r): continue if verbose: print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } @register_vcs_handler("git", "pieces_from_vcs") @@ -233,8 +249,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): GITS = ["git.cmd", "git.exe"] TAG_PREFIX_REGEX = r"\*" - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -242,11 +257,19 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", - "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)], - cwd=root) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), + ], + cwd=root, + ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") @@ -261,8 +284,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) # --abbrev-ref was added in git-1.6.3 if rc != 0 or branch_name is None: raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") @@ -302,17 +324,16 @@ def git_pieces_from_vcs(tag_prefix, root, 
verbose, runner=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] + git_describe = git_describe[: git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces # tag @@ -321,10 +342,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] + pieces["closest-tag"] = full_tag[len(tag_prefix) :] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -373,8 +393,7 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered @@ -403,8 +422,7 @@ def render_pep440_branch(pieces): rendered = "0" if pieces["branch"] != "master": rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered @@ -432,7 +450,7 @@ def render_pep440_pre(pieces): tag_version, post_version = pep440_split_post(pieces["closest-tag"]) rendered = tag_version if post_version is not None: - rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) else: rendered += ".post0.dev%d" % (pieces["distance"]) else: @@ -565,11 +583,13 @@ def render_git_describe_long(pieces): def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } if not style or style == "default": style = "pep440" # the default @@ -593,9 +613,13 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } def get_versions(): @@ -609,8 +633,7 @@ def get_versions(): verbose = cfg.verbose try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass @@ -619,13 +642,16 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might 
live) to this file. Invert # this to find the root from __file__. - for _ in cfg.versionfile_source.split('/'): + for _ in cfg.versionfile_source.split("/"): root = os.path.dirname(root) except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) @@ -639,6 +665,10 @@ def get_versions(): except NotThisMethod: pass - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/tools/schemacode/versioneer.py b/tools/schemacode/versioneer.py index b4cd1d6c7c..2da2029331 100644 --- a/tools/schemacode/versioneer.py +++ b/tools/schemacode/versioneer.py @@ -1,4 +1,3 @@ - # Version: 0.21 """The Versioneer - like a rocketeer, but for versions. @@ -309,11 +308,13 @@ def get_root(): setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") + err = ( + "Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND')." 
+ ) raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools @@ -326,8 +327,10 @@ def get_root(): me_dir = os.path.normcase(os.path.splitext(my_path)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) + print( + "Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(my_path), versioneer_py) + ) except NameError: pass return root @@ -372,15 +375,16 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): """Store f in HANDLERS[vcs][method].""" HANDLERS.setdefault(vcs, {})[method] = f return f + return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) process = None @@ -388,10 +392,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, try: dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) break except OSError: e = sys.exc_info()[1] @@ -414,7 +421,9 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, return stdout, process.returncode -LONG_VERSION_PY['git'] = r''' +LONG_VERSION_PY[ + "git" +] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -1116,7 +1125,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -1125,7 +1134,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} + tags = {r for r in refs if re.search(r"\d", r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -1133,24 +1142,31 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] + r = ref[len(tag_prefix) :] # Filter out refs that exactly match prefix or that don't start # with a number once the prefix is stripped (mostly a concern # when prefix is '') - if not re.match(r'\d', r): + if not re.match(r"\d", r): continue if verbose: print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } @register_vcs_handler("git", "pieces_from_vcs") @@ -1167,8 +1183,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): GITS = ["git.cmd", "git.exe"] TAG_PREFIX_REGEX = r"\*" - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -1176,11 +1191,19 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", - "%s%s" % (tag_prefix, TAG_PREFIX_REGEX)], - cwd=root) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), + ], + cwd=root, + ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") @@ -1195,8 +1218,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) # --abbrev-ref was added in git-1.6.3 if rc != 0 or branch_name is None: raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") @@ -1236,17 +1258,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] + git_describe = git_describe[: git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces # tag @@ -1255,10 +1276,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] + pieces["closest-tag"] = full_tag[len(tag_prefix) :] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -1331,15 +1351,21 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @@ -1368,11 +1394,9 @@ def versions_from_file(filename): contents = f.read() except OSError: raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) @@ -1381,8 +1405,7 @@ def versions_from_file(filename): def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) + contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) @@ -1414,8 +1437,7 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered @@ -1444,8 +1466,7 @@ def render_pep440_branch(pieces): rendered = "0" if pieces["branch"] != "master": rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered @@ -1473,7 +1494,7 @@ def render_pep440_pre(pieces): tag_version, post_version = pep440_split_post(pieces["closest-tag"]) rendered = tag_version if post_version is not None: - rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) else: rendered 
+= ".post0.dev%d" % (pieces["distance"]) else: @@ -1606,11 +1627,13 @@ def render_git_describe_long(pieces): def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } if not style or style == "default": style = "pep440" # the default @@ -1634,9 +1657,13 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } class VersioneerBadRootError(Exception): @@ -1659,8 +1686,7 @@ def get_versions(verbose=False): handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" + assert cfg.versionfile_source is not None, "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) @@ -1714,9 +1740,13 @@ def get_versions(verbose=False): if verbose: print("unable to compute version") - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } def get_version(): @@ -1769,6 +1799,7 @@ def run(self): print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools @@ -1787,8 +1818,8 @@ def run(self): # setup.py egg_info -> ? 
# we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] + if "build_py" in cmds: + _build_py = cmds["build_py"] elif "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: @@ -1803,14 +1834,14 @@ def run(self): # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) + target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] + if "build_ext" in cmds: + _build_ext = cmds["build_ext"] elif "setuptools" in sys.modules: from setuptools.command.build_ext import build_ext as _build_ext else: @@ -1830,14 +1861,15 @@ def run(self): return # now locate _version.py in the new build/ directory and replace # it with an updated value - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) + target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) + cmds["build_ext"] = cmd_build_ext if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ @@ -1858,17 +1890,21 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + cmds["build_exe"] = cmd_build_exe del cmds["build_py"] - if 'py2exe' in sys.modules: # py2exe enabled? + if "py2exe" in sys.modules: # py2exe enabled? 
from py2exe.distutils_buildexe import py2exe as _py2exe class cmd_py2exe(_py2exe): @@ -1884,18 +1920,22 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] + if "sdist" in cmds: + _sdist = cmds["sdist"] elif "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: @@ -1919,8 +1959,8 @@ def make_release_tree(self, base_dir, files): # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) + write_to_version_file(target_versionfile, self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist return cmds @@ -1980,11 +2020,9 @@ def do_setup(): root = get_root() try: cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: + except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) + print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) @@ -1993,15 +2031,18 @@ def do_setup(): print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: @@ -2049,8 +2090,7 @@ def do_setup(): else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) + print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: From e4d3f0881f507118fcb6a234586c06e33b154ef6 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Sat, 18 Dec 2021 11:29:52 -0500 Subject: [PATCH 34/36] Fix linting issue. 
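flake8 flags "\((.+)\)" because \( is an invalid escape sequence in a regular (non-raw) Python string literal, so the pattern is moved to a raw string here. Note that the doubled backslashes in r"\\((.+)\\)" make the pattern look for a literal backslash before each parenthesis; the following commit restores r"\((.+)\)". For reference, the intended behavior of the pattern (an illustrative call, not taken from the test suite):

    import re
    re.findall(r"\((.+)\)", "duration (1)")  # -> ["1"]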
--- tools/schemacode/schemacode/render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/schemacode/schemacode/render.py b/tools/schemacode/schemacode/render.py index 47f44a2113..b1de890e60 100644 --- a/tools/schemacode/schemacode/render.py +++ b/tools/schemacode/schemacode/render.py @@ -379,7 +379,7 @@ def make_entity_table(schema, tablefmt="github", **kwargs): def _remove_numeric_suffixes(string): import re - suffix_str = re.findall("\((.+)\)", string) + suffix_str = re.findall(r"\\((.+)\\)", string) # The "Format" row should be skipped if not suffix_str: return string From adeeeedb9072ce18f5b3b405fdf2f6ac8fe57400 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Wed, 5 Jan 2022 12:32:30 -0500 Subject: [PATCH 35/36] Apply @effigies' suggestions Thanks for the suggestions! Co-authored-by: Chris Markiewicz --- tools/schemacode/schemacode/render.py | 2 +- tools/schemacode/schemacode/schema.py | 42 +++++++++------------------ 2 files changed, 14 insertions(+), 30 deletions(-) diff --git a/tools/schemacode/schemacode/render.py b/tools/schemacode/schemacode/render.py index b1de890e60..db802d8db9 100644 --- a/tools/schemacode/schemacode/render.py +++ b/tools/schemacode/schemacode/render.py @@ -379,7 +379,7 @@ def make_entity_table(schema, tablefmt="github", **kwargs): def _remove_numeric_suffixes(string): import re - suffix_str = re.findall(r"\\((.+)\\)", string) + suffix_str = re.findall(r"\((.+)\)", string) # The "Format" row should be skipped if not suffix_str: return string diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index 805a4f5177..d3975ab59a 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -78,39 +78,23 @@ def load_schema(schema_path): schema["rules"] = {} # Load object definitions. All are present in single files. - object_group_files = sorted(glob(str(objects_dir / "*.yaml"))) - for object_group_file in object_group_files: - group_name = op.splitext(op.basename(object_group_file))[0] - lgr.debug(f"Loading {group_name} objects.") - with open(object_group_file, "r") as fo: - dict_ = yaml.load(fo, Loader=yaml.SafeLoader) - dict_ = dereference_yaml(dict_, dict_) - schema["objects"][group_name] = dict_ + for object_group_file in sorted(objects_dir.glob("*.yaml")): + lgr.debug(f"Loading {object_group_file.stem} objects.") + dict_ = yaml.safe_load(object_group_file.read_text()) + schema["objects"][object_group_file.stem] = dereference_yaml(dict_, dict_) # Grab single-file rule groups - rule_group_files = sorted(glob(str(rules_dir / "*.yaml"))) - rule_group_folders = sorted(glob(str(rules_dir / "*"))) - rule_group_folders = [f for f in rule_group_folders if op.isdir(f)] - for rule_group_file in rule_group_files: - group_name = op.splitext(op.basename(rule_group_file))[0] - lgr.debug(f"Loading {group_name} rules.") - with open(rule_group_file, "r") as fo: - dict_ = yaml.load(fo, Loader=yaml.SafeLoader) - dict_ = dereference_yaml(dict_, dict_) - schema["rules"][group_name] = dict_ + for rule_group_file in sorted(rules_dir.glob("*.yaml")): + lgr.debug(f"Loading {rule_group_file.stem} rules.") + dict_ = yaml.safe_load(rule_group_file.read_text()) + schema["rules"][rule_group_file.stem] = dereference_yaml(dict_, dict_) # Load folders of rule subgroups. 
- for rule_group_folder in rule_group_folders: - group_name = op.basename(rule_group_folder) - rule_subgroup_files = sorted(glob(op.join(rule_group_folder, "*.yaml"))) - schema["rules"][group_name] = {} - for rule_subgroup_file in rule_subgroup_files: - subgroup_name = op.splitext(op.basename(rule_subgroup_file))[0] - lgr.debug(f"Loading {subgroup_name} rules.") - with open(rule_subgroup_file, "r") as fo: - dict_ = yaml.load(fo, Loader=yaml.SafeLoader) - dict_ = dereference_yaml(dict_, dict_) - schema["rules"][group_name][subgroup_name] = dict_ + for rule_group_file in sorted(rules_dir.glob("*/*.yaml")): + rule = schema["rules"].setdefault(rule_group_file.parent.name, {}) + lgr.debug(f"Loading {rule_group_file.stem} rules.") + dict_ = yaml.safe_load(rule_group_file.read_text()) + rule[rule_group_file.stem] = dereference_yaml(dict_, dict_) return schema From 97794d56b45ec6b3ffa61ad6a670d17382ae7fdc Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Wed, 5 Jan 2022 12:34:15 -0500 Subject: [PATCH 36/36] Remove unused imports. --- tools/schemacode/schemacode/schema.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tools/schemacode/schemacode/schema.py b/tools/schemacode/schemacode/schema.py index d3975ab59a..9203eaa921 100644 --- a/tools/schemacode/schemacode/schema.py +++ b/tools/schemacode/schemacode/schema.py @@ -1,9 +1,7 @@ """Schema loading- and processing-related functions.""" import logging import os -import os.path as op from copy import deepcopy -from glob import glob from pathlib import Path import yaml
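Together, the last two patches replace the os.path/glob-based loader with pathlib: every objects/*.yaml, rules/*.yaml, and rules/*/*.yaml file is parsed with yaml.safe_load and stored under its filename stem, which is what leaves the os.path and glob imports unused and removable here. A minimal usage sketch (the path is illustrative and simply needs to point at a checkout of src/schema):

    from pathlib import Path
    from schemacode.schema import load_schema

    schema = load_schema(Path("src/schema"))
    print(sorted(schema["objects"]))  # one key per objects/*.yaml file
    print(sorted(schema["rules"]))    # one key per rules/ file or subdirectory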