Skip to content

Commit

Permalink
Merge pull request #11 from GlacioHack/main
Browse files Browse the repository at this point in the history
merge with latest xdem
  • Loading branch information
liuh886 authored Mar 25, 2024
2 parents 354680a + 48bbf17 commit 345bec4
Show file tree
Hide file tree
Showing 82 changed files with 8,188 additions and 4,594 deletions.
7 changes: 7 additions & 0 deletions .github/PULL_REQUEST_TEMPLATE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
<!-- Feel free to remove check-list items that aren't relevant to your change -->

- [ ] Resolves #xxx,
- [ ] Tests added, otherwise issue #xxx opened,
- [ ] Fully documented, including `api/*.md` for new API,
- [ ] New optional dependencies added to both `dev-environment.yml` and `setup.cfg`,
- [ ] If contributor workflow (test, doc, linting) or Python version support changed, update `CONTRIBUTING.md`.
8 changes: 8 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
# Check for updates to GitHub Actions every week
interval: "weekly"
53 changes: 0 additions & 53 deletions .github/get_yml_env_nopy.py

This file was deleted.

149 changes: 149 additions & 0 deletions .github/scripts/generate_pip_deps_from_conda.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
#!/usr/bin/env python3
"""
(Copied from pandas: https://github.com/pandas-dev/pandas/blob/main/scripts/generate_pip_deps_from_conda.py)
Convert the conda environment.yml to the pip requirements-dev.txt,
or check that they have the same packages (for the CI)
Usage:
Generate `requirements-dev.txt`
$ python scripts/generate_pip_deps_from_conda.py
Compare and fail (exit status != 0) if `requirements-dev.txt` has not been
generated with this script:
$ python scripts/generate_pip_deps_from_conda.py --compare
"""
import argparse
import pathlib
import re
import sys

if sys.version_info >= (3, 11):
import tomllib
else:
import tomli as tomllib
import yaml

# Conda packages with no pip equivalent (python itself is provided by the interpreter).
EXCLUDE = {"python"}
# Conda packages whose pinned version must be remapped for pip.
REMAP_VERSION = {"tzdata": "2022.1"}
# Conda-name -> pip-name renames (currently none needed for this project).
RENAME = {}


def conda_package_to_pip(package: str):
    """
    Convert a conda package specifier to its pip equivalent.

    In most cases they are the same, those are the exceptions:
    - Packages that should be excluded (in `EXCLUDE`): returns None.
    - Packages that should be renamed (in `RENAME`).
    - Packages whose pip version pin differs (in `REMAP_VERSION`).
    - A package requiring a specific version, in conda is defined with a single
      equal (e.g. ``pandas=1.0``) and in pip with two (e.g. ``pandas==1.0``).

    :param package: Conda dependency string, e.g. ``"pandas=1.0"`` or ``"numpy>=1.20"``.
    :returns: The pip requirement string, or None if the package is excluded.
    """
    # A "=" not preceded by "<" or ">" is a conda exact pin: rewrite it to pip "==".
    # (Fix: removed a leftover debug print() that polluted the script's stdout.)
    package = re.sub("(?<=[^<>])=", "==", package).strip()

    for compare in ("<=", ">=", "=="):
        if compare in package:
            pkg, version = package.split(compare)
            if pkg in EXCLUDE:
                return
            if pkg in REMAP_VERSION:
                return "".join((pkg, compare, REMAP_VERSION[pkg]))
            if pkg in RENAME:
                return "".join((RENAME[pkg], compare, version))

    # Unversioned specifiers: exclusion and renaming apply to the bare name.
    if package in EXCLUDE:
        return

    if package in RENAME:
        return RENAME[package]

    return package


def generate_pip_from_conda(conda_path: pathlib.Path, pip_path: pathlib.Path, compare: bool = False) -> bool:
    """
    Generate the pip dependencies file from the conda file, or compare that
    they are synchronized (``compare=True``).

    Parameters
    ----------
    conda_path : pathlib.Path
        Path to the conda file with dependencies (e.g. `environment.yml`).
    pip_path : pathlib.Path
        Path to the pip file with dependencies (e.g. `requirements-dev.txt`).
    compare : bool, default False
        Whether to generate the pip file (``False``) or to compare if the
        pip file has been generated with this script and the last version
        of the conda file (``True``).

    Returns
    -------
    bool
        True if the comparison fails, False otherwise.
    """
    with conda_path.open() as stream:
        dependencies = yaml.safe_load(stream)["dependencies"]

    requirements = []
    for dep in dependencies:
        if isinstance(dep, str):
            # Plain conda specifier: translate, skipping excluded packages.
            translated = conda_package_to_pip(dep)
            if translated:
                requirements.append(translated)
            continue
        if isinstance(dep, dict) and len(dep) == 1 and "pip" in dep:
            # Dependencies pulled directly from GitHub (temporary CI passing),
            # e.g. git+https://github.com/GlacioHack/geoutils.git, are renamed
            # to the package repository name.
            for pip_spec in dep["pip"]:
                if "+" in pip_spec and pip_spec.split("+")[0] == "git":
                    pip_spec = pip_spec.split("/")[-1].split(".git")[0]
                requirements.append(pip_spec)
            continue
        raise ValueError(f"Unexpected dependency {dep}")

    header = (
        f"# This file is auto-generated from {conda_path.name}, do not modify.\n"
        "# See that file for comments about the need/usage of each dependency.\n\n"
    )
    pip_content = header + "\n".join(requirements) + "\n"

    # Adding setuptools from pyproject.toml is currently disabled; kept for reference.
    # with open(pathlib.Path(conda_path.parent, "pyproject.toml"), "rb") as fd:
    #     meta = tomllib.load(fd)
    # for requirement in meta["build-system"]["requires"]:
    #     if "setuptools" in requirement:
    #         pip_content += requirement
    #         pip_content += "\n"

    if compare:
        # Compare mode: report whether the existing pip file is out of date.
        with pip_path.open() as stream:
            return pip_content != stream.read()

    with pip_path.open("w") as stream:
        stream.write(pip_content)
    return False


if __name__ == "__main__":
    # CLI entry point: by default regenerate requirements.txt from
    # environment.yml; with --compare, only check that it is up to date.
    parser = argparse.ArgumentParser(description="convert (or compare) conda file to pip")
    parser.add_argument(
        "--compare",
        action="store_true",
        help="compare whether the two files are equivalent",
    )
    cli_args = parser.parse_args()

    conda_fname = "environment.yml"
    pip_fname = "requirements.txt"
    # Repository root: three levels up from .github/scripts/<this file>.
    repo_path = pathlib.Path(__file__).parent.parent.parent.absolute()
    mismatch = generate_pip_from_conda(
        pathlib.Path(repo_path, conda_fname),
        pathlib.Path(repo_path, pip_fname),
        compare=cli_args.compare,
    )
    if mismatch:
        msg = f"`{pip_fname}` has to be generated with `{__file__}` after " f"`{conda_fname}` is modified.\n"
        sys.stderr.write(msg)
    # Non-zero exit status when the comparison failed (True -> 1).
    sys.exit(mismatch)
54 changes: 54 additions & 0 deletions .github/scripts/generate_yml_env_fixed_py.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
from __future__ import annotations

import argparse

import yaml # type: ignore


def environment_yml_nopy(fn_env: str, py_version: str, add_deps: list[str] | None = None) -> None:
    """
    Generate temporary environment-py3.XX.yml files forcing python versions for setup of continuous integration.

    :param fn_env: Filename path to environment.yml.
    :param py_version: Python version to force.
    :param add_deps: Additional dependencies to solve for directly (for instance graphviz fails with mamba update).
    """

    # Load the yml as dictionary.
    # Fix: use a context manager so the file handle is closed (the previous
    # yaml.safe_load(open(fn_env)) leaked it); also corrected the add_deps
    # annotation, which defaulted to None while annotated as list[str].
    with open(fn_env) as stream:
        yaml_env = yaml.safe_load(stream)
    conda_dep_env = list(yaml_env["dependencies"])

    # Force python version
    # NOTE(review): this substring test would also match e.g. "ipython" or
    # "python-graphviz"; assumes environment.yml lists only "python" — confirm.
    conda_dep_env_forced_py = ["python=" + py_version if "python" in dep else dep for dep in conda_dep_env]

    # Optionally, add other dependencies
    if add_deps is not None:
        conda_dep_env_forced_py.extend(add_deps)

    # Copy back to new yaml dict
    yaml_out = yaml_env.copy()
    yaml_out["dependencies"] = conda_dep_env_forced_py

    # Written to the current working directory, named after the forced version.
    with open("environment-ci-py" + py_version + ".yml", "w") as outfile:
        yaml.dump(yaml_out, outfile, default_flow_style=False)


if __name__ == "__main__":
    # CLI entry point: generate an environment-ci-pyX.Y.yml next to the CWD
    # from the generic environment file, forcing a single Python version.
    parser = argparse.ArgumentParser(description="Generate environment files for CI with fixed python versions.")
    parser.add_argument("fn_env", metavar="fn_env", type=str, help="Path to the generic environment file.")
    parser.add_argument(
        "--pyv",
        dest="py_version",
        default="3.9",
        type=str,
        help="Python version to force.",
    )
    parser.add_argument(
        "--add",
        dest="add_deps",
        default=None,
        type=str,
        help="Comma-separated list of dependencies to add.",
    )
    args = parser.parse_args()
    # Fix: only split when --add was provided; previously args.add_deps.split(",")
    # raised AttributeError whenever the option was omitted (default is None).
    add_deps = args.add_deps.split(",") if args.add_deps is not None else None
    environment_yml_nopy(fn_env=args.fn_env, py_version=args.py_version, add_deps=add_deps)
6 changes: 3 additions & 3 deletions .github/workflows/pre-commit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,6 @@ jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: pre-commit/action@v2.0.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: pre-commit/action@v3.0.1
16 changes: 10 additions & 6 deletions .github/workflows/python-publish.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: pypi
name: Upload package to PyPI

on:
release:
Expand All @@ -20,19 +20,23 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: '3.x'
python-version: '3.11'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install -U setuptools wheel build twine --user
pip install setuptools setuptools_scm wheel twine
- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
# Build package, test pip install works, then upload to PyPI with twine
run: |
python -m build
python setup.py sdist bdist_wheel
pip install dist/*.tar.gz
twine upload dist/*
Original file line number Diff line number Diff line change
Expand Up @@ -17,60 +17,56 @@ jobs:
strategy:
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
python-version: ["3.8", "3.9", "3.10"]
python-version: ["3.9", "3.10", "3.11"]

# Run all shells using bash (including Windows)
defaults:
run:
shell: bash -l {0}

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4

# We initiate the environment empty, and check if a key for this environment doesn't already exist in the cache
- name: Initiate empty environment
uses: conda-incubator/setup-miniconda@v2
uses: conda-incubator/setup-miniconda@v3
with:
miniforge-variant: Mambaforge
miniforge-version: latest
auto-update-conda: true
use-mamba: true
channel-priority: strict
activate-environment: xdem-dev
python-version:

- name: Get month for resetting cache
id: get-date
run: echo "cache_date=$(/bin/date -u '+%Y%m')" >> $GITHUB_ENV
shell: bash

- name: Cache conda env
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ${{ env.CONDA }}/envs
key: conda-${{ matrix.os }}-${{ matrix.python-version }}-${{ env.cache_date }}-${{ hashFiles('dev-environment.yml') }}-${{ env.CACHE_NUMBER }}
env:
CACHE_NUMBER: 0 # Increase this value to reset cache if environment.yml has not changed
id: cache

# The trick below is necessary because the generic environment file does not specify a Python version, and only
# "conda env update" can be used to update with an environment file, which upgrades the Python version
# The trick below is necessary because the generic environment file does not specify a Python version, and ONLY
# "conda env update" CAN BE USED WITH CACHING, which upgrades the Python version when using the base environment
# (we add "graphviz" from dev-environment to solve all dependencies at once, as graphviz relies on image
# processing packages very much like geo-packages; not a problem for docs, dev installs where all is done at once)
- name: Install base environment with a fixed Python version
if: steps.cache.outputs.cache-hit != 'true'
run: |
mamba install pyyaml python=${{ matrix.python-version }}
pkgs_conda_base=`python .github/get_yml_env_nopy.py "environment.yml" --p "conda"`
pkgs_pip_base=`python .github/get_yml_env_nopy.py "environment.yml" --p "pip"`
mamba install python=${{ matrix.python-version }} $pkgs_conda_base graphviz
if [[ "$pkgs_pip_base" != "None" ]]; then
pip install $pkgs_pip_base
fi
python .github/scripts/generate_yml_env_fixed_py.py --pyv ${{ matrix.python-version }} --add "graphviz,opencv,pytransform3d" "environment.yml"
mamba env update -n xdem-dev -f environment-ci-py${{ matrix.python-version }}.yml
- name: Install project
run: pip install -e . --no-dependencies


# This steps allows us to check the "import xdem" with the base environment provided to users, before adding
# development-specific dependencies by differencing the env and dev-env yml files
- name: Check import works with base environment
Expand Down
Loading

0 comments on commit 345bec4

Please sign in to comment.