Skip to content

Commit

Permalink
Merge branch 'main' of github.com:keller-mark/scanpy into keller-mark/densmap-2
Browse files Browse the repository at this point in the history
  • Loading branch information
keller-mark committed Dec 10, 2024
2 parents 603afb3 + 3f329bb commit a2bbcf8
Show file tree
Hide file tree
Showing 164 changed files with 3,363 additions and 2,284 deletions.
12 changes: 9 additions & 3 deletions .azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,16 @@ jobs:
vmImage: 'ubuntu-22.04'
strategy:
matrix:
Python3.9:
python.version: '3.9'
Python3.10:
python.version: '3.10'
Python3.12: {}
minimal_dependencies:
TEST_EXTRA: 'test-min'
anndata_dev:
DEPENDENCIES_VERSION: "pre-release"
TEST_TYPE: "coverage"
minimum_versions:
python.version: '3.9'
python.version: '3.10'
DEPENDENCIES_VERSION: "minimum-version"
TEST_TYPE: "coverage"

Expand Down Expand Up @@ -103,6 +103,12 @@ jobs:
testResultsFormat: NUnit
testRunTitle: 'Publish test results for $(Agent.JobName)'

- task: PublishBuildArtifacts@1
inputs:
pathToPublish: '.pytest_cache/d/debug'
artifactName: debug-data
condition: eq(variables['TEST_TYPE'], 'coverage')

- script: bash <(curl -s https://codecov.io/bash)
displayName: 'Upload to codecov.io'
condition: eq(variables['TEST_TYPE'], 'coverage')
Expand Down
3 changes: 1 addition & 2 deletions .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,7 @@ jobs:
key: benchmark-state-${{ hashFiles('benchmarks/**') }}

- name: Install dependencies
# TODO: revert once this PR is merged: https://github.com/airspeed-velocity/asv/pull/1397
run: pip install 'asv @ git+https://github.com/ivirshup/asv@fix-conda-usage'
run: pip install 'asv>=0.6.4'

- name: Configure ASV
working-directory: ${{ env.ASV_DIR }}
Expand Down
8 changes: 4 additions & 4 deletions .github/workflows/check-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,13 +49,13 @@ jobs:
with:
fetch-depth: 0
filter: blob:none
- name: Find out if relevant release notes are modified
uses: dorny/paths-filter@v2
- name: Find out if a relevant release fragment is added
uses: dorny/paths-filter@v3
id: changes
with:
filters: | # this is intentionally a string
relnotes: 'docs/release-notes/${{ github.event.pull_request.milestone.title }}.md'
- name: Check if relevant release notes are modified
relnotes: 'docs/release-notes/${{ github.event.pull_request.number }}.*.md'
- name: Check if a relevant release fragment is added
uses: flying-sheep/check@v1
with:
success: ${{ steps.changes.outputs.relnotes }}
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
/tests/**/*failed-diff.png

# Environment management
/hatch.toml
/Pipfile
/Pipfile.lock
/requirements*.lock
Expand All @@ -29,6 +28,7 @@
# Python build files
__pycache__/
/src/scanpy/_version.py
/ci/scanpy-min-deps.txt
/dist/
/*-env/
/env-*/
Expand Down
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.6
rev: v0.8.2
hooks:
- id: ruff
types_or: [python, pyi, jupyter]
Expand All @@ -20,7 +20,7 @@ repos:
- --sort-by-bibkey
- --drop=abstract
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v5.0.0
hooks:
- id: trailing-whitespace
exclude: tests/_data
Expand Down
10 changes: 9 additions & 1 deletion .readthedocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,16 @@ version: 2
submodules:
include: all
build:
os: ubuntu-20.04
os: ubuntu-24.04
tools:
python: '3.12'
jobs:
post_checkout:
# unshallow so version can be derived from tag
- git fetch --unshallow || true
pre_build:
# run towncrier to preview the next version’s release notes
- ( find docs/release-notes -regex '[^.]+[.][^.]+.md' | grep -q . ) && towncrier build --keep || true
sphinx:
fail_on_warning: true # do not change or you will be fired
configuration: docs/conf.py
Expand All @@ -14,4 +21,5 @@ python:
path: .
extra_requirements:
- doc
- dev # for towncrier
- leiden
2 changes: 1 addition & 1 deletion benchmarks/asv.conf.json
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@

// The Pythons you'd like to test against. If not provided, defaults
// to the current version of Python used to run `asv`.
// "pythons": ["3.9", "3.12"],
// "pythons": ["3.10", "3.12"],

// The list of conda channel names to be searched for benchmark
// dependency packages in the specified order
Expand Down
7 changes: 4 additions & 3 deletions benchmarks/benchmarks/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,16 @@
import scanpy as sc

if TYPE_CHECKING:
from collections.abc import Callable, Sequence, Set
from collections.abc import Callable, Sequence
from collections.abc import Set as AbstractSet
from typing import Literal, Protocol, TypeVar

from anndata import AnnData

C = TypeVar("C", bound=Callable)

class ParamSkipper(Protocol):
def __call__(self, **skipped: Set) -> Callable[[C], C]: ...
def __call__(self, **skipped: AbstractSet) -> Callable[[C], C]: ...

Dataset = Literal["pbmc68k_reduced", "pbmc3k", "bmmc", "lung93k"]
KeyX = Literal[None, "off-axis"]
Expand Down Expand Up @@ -195,7 +196,7 @@ def param_skipper(
b 5
"""

def skip(**skipped: Set) -> Callable[[C], C]:
def skip(**skipped: AbstractSet) -> Callable[[C], C]:
skipped_combs = [
tuple(record.values())
for record in (
Expand Down
55 changes: 43 additions & 12 deletions ci/scripts/min-deps.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,17 @@
#!python3
#!/usr/bin/env python3
# /// script
# dependencies = [
# "tomli; python_version < '3.11'",
# "packaging",
# ]
# ///

from __future__ import annotations

import argparse
import sys
from collections import deque
from contextlib import ExitStack
from pathlib import Path
from typing import TYPE_CHECKING

Expand All @@ -16,7 +24,7 @@
from packaging.version import Version

if TYPE_CHECKING:
from collections.abc import Generator, Iterable
from collections.abc import Generator, Iterable, Sequence


def min_dep(req: Requirement) -> Requirement:
Expand All @@ -27,18 +35,21 @@ def min_dep(req: Requirement) -> Requirement:
-------
>>> min_dep(Requirement("numpy>=1.0"))
"numpy==1.0"
<Requirement('numpy==1.0.*')>
"""
req_name = req.name
if req.extras:
req_name = f"{req_name}[{','.join(req.extras)}]"

if not req.specifier:
filter_specs = [
spec for spec in req.specifier if spec.operator in {"==", "~=", ">=", ">"}
]
if not filter_specs:
return Requirement(req_name)

min_version = Version("0.0.0.a1")
for spec in req.specifier:
if spec.operator in [">", ">=", "~="]:
for spec in filter_specs:
if spec.operator in {">", ">=", "~="}:
min_version = max(min_version, Version(spec.version))
elif spec.operator == "==":
min_version = Version(spec.version)
Expand All @@ -65,12 +76,19 @@ def extract_min_deps(
yield min_dep(req)


def main():
class Args(argparse.Namespace):
path: Path
output: Path | None
extras: list[str]


def main(argv: Sequence[str] | None = None) -> None:
parser = argparse.ArgumentParser(
prog="min-deps",
description="""Parse a pyproject.toml file and output a list of minimum dependencies.
Output is directly passable to `pip install`.""",
description=(
"Parse a pyproject.toml file and output a list of minimum dependencies. "
"Output is optimized for `[uv] pip install` (see `-o`/`--output` for details)."
),
usage="pip install `python min-deps.py pyproject.toml`",
)
parser.add_argument(
Expand All @@ -79,8 +97,18 @@ def main():
parser.add_argument(
"--extras", type=str, nargs="*", default=(), help="extras to install"
)
parser.add_argument(
*("--output", "-o"),
type=Path,
default=None,
help=(
"output file (default: stdout). "
"Without this option, output is space-separated for direct passing to `pip install`. "
"With this option, output written to a file newline-separated file usable as `requirements.txt` or `constraints.txt`."
),
)

args = parser.parse_args()
args = parser.parse_args(argv, Args())

pyproject = tomllib.loads(args.path.read_text())

Expand All @@ -92,7 +120,10 @@ def main():

min_deps = extract_min_deps(deps, pyproject=pyproject)

print(" ".join(map(str, min_deps)))
sep = "\n" if args.output else " "
with ExitStack() as stack:
f = stack.enter_context(args.output.open("w")) if args.output else sys.stdout
print(sep.join(map(str, min_deps)), file=f)


if __name__ == "__main__":
Expand Down
111 changes: 111 additions & 0 deletions ci/scripts/towncrier_automation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
#!/usr/bin/env python3
# /// script
# dependencies = [ "towncrier", "packaging" ]
# ///

from __future__ import annotations

import argparse
import subprocess
from typing import TYPE_CHECKING

from packaging.version import Version

if TYPE_CHECKING:
from collections.abc import Sequence


class Args(argparse.Namespace):
    # Typed view of the parsed command-line arguments; argparse fills these
    # in when an instance is passed as the namespace to `parse_args`.
    version: str  # release version string, e.g. "1.2.3" or "1.2.3rc1"
    dry_run: bool  # when True, skip the push, PR creation, and auto-merge


def parse_args(argv: Sequence[str] | None = None) -> Args:
    """Parse command-line arguments into an :class:`Args` namespace.

    Parameters
    ----------
    argv
        Argument list to parse; ``None`` means ``sys.argv[1:]``.

    Returns
    -------
    The populated ``Args`` namespace.

    Raises
    ------
    ValueError
        If the given version does not have exactly three release parts
        (major, minor, patch).
    """
    description = (
        "This script runs towncrier for a given version, "
        "creates a branch off of the current one, "
        "and then creates a PR into the original branch with the changes. "
        "The PR will be backported to main if the current branch is not main."
    )
    version_help = (
        "The new version for the release must have at least three parts, like `major.minor.patch` and no `major.minor`. "
        "It can have a suffix like `major.minor.patch.dev0` or `major.minor.0rc1`."
    )
    parser = argparse.ArgumentParser(
        prog="towncrier-automation", description=description
    )
    parser.add_argument("version", type=str, help=version_help)
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Whether or not to dry-run the actual creation of the pull request",
    )
    parsed = parser.parse_args(argv, Args())
    # Validate: a release version must have exactly three numeric release
    # components; pre/dev suffixes are allowed and do not count.
    if len(Version(parsed.version).release) != 3:
        msg = f"Version argument {parsed.version} must contain major, minor, and patch version."
        raise ValueError(msg)
    return parsed


def main(argv: Sequence[str] | None = None) -> None:
    """Build release notes with towncrier and open a PR with the result.

    Shells out to ``towncrier``, ``git``, and ``gh`` (all with ``check=True``,
    so any failing command aborts the script):

    1. run ``towncrier build`` for the requested version;
    2. create a ``release_notes_<version>`` branch and commit the updated
       ``docs/release-notes``;
    3. push the branch and open a PR against the branch we started on
       (skipped with ``--dry-run``); if that branch is not ``main``, the PR
       body asks meeseeksdev to backport the change to ``main``;
    4. enable squash auto-merge on the PR (skipped with ``--dry-run``).
    """
    args = parse_args(argv)

    # Run towncrier
    subprocess.run(
        ["towncrier", "build", f"--version={args.version}", "--yes"], check=True
    )

    # Check if we are on the main branch to know if we need to backport
    base_branch = subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        capture_output=True,
        text=True,
        check=True,
    ).stdout.strip()
    pr_description = "" if base_branch == "main" else "@meeseeksdev backport to main"
    branch_name = f"release_notes_{args.version}"

    # Create a new branch + commit
    subprocess.run(["git", "switch", "-c", branch_name], check=True)
    subprocess.run(["git", "add", "docs/release-notes"], check=True)
    pr_title = f"(chore): generate {args.version} release notes"
    subprocess.run(["git", "commit", "-m", pr_title], check=True)

    # push
    if not args.dry_run:
        subprocess.run(
            ["git", "push", "--set-upstream", "origin", branch_name], check=True
        )
    else:
        print("Dry run, not pushing")

    # Create a PR
    # NOTE(review): the "no milestone" label is only applied when the PR
    # targets main — presumably a repo-specific milestone-check opt-out;
    # confirm against the repo's check-pr workflow.
    subprocess.run(
        [
            "gh",
            "pr",
            "create",
            f"--base={base_branch}",
            f"--title={pr_title}",
            f"--body={pr_description}",
            *(["--label=no milestone"] if base_branch == "main" else []),
            *(["--dry-run"] if args.dry_run else []),
        ],
        check=True,
    )

    # Enable auto-merge
    if not args.dry_run:
        subprocess.run(
            ["gh", "pr", "merge", branch_name, "--auto", "--squash"], check=True
        )
    else:
        print("Dry run, not merging")


if __name__ == "__main__":
    # Allow direct execution: `python ci/scripts/towncrier_automation.py <version>`
    main()
1 change: 1 addition & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@
"scanpydoc", # needs to be before sphinx.ext.linkcode
"sphinx.ext.linkcode",
"sphinx_design",
"sphinx_tabs.tabs",
"sphinx_search.extension",
"sphinxext.opengraph",
*[p.stem for p in (HERE / "extensions").glob("*.py") if p.stem not in {"git_ref"}],
Expand Down
Loading

0 comments on commit a2bbcf8

Please sign in to comment.