diff --git a/.azure/gpu-integrations.yml b/.azure/gpu-integrations.yml index 73353304dee..8182deeb84f 100644 --- a/.azure/gpu-integrations.yml +++ b/.azure/gpu-integrations.yml @@ -66,16 +66,16 @@ jobs: pip install -q packaging fire requests wget python -m wget https://raw.githubusercontent.com/Lightning-AI/utilities/main/scripts/adjust-torch-versions.py python adjust-torch-versions.py requirements/base.txt $(torch-ver) - python adjust-torch-versions.py requirements/integrate.txt $(torch-ver) + python adjust-torch-versions.py requirements/_integrate.txt $(torch-ver) # FixMe: this shall not be for all integrations/cases - python .github/assistant.py set-oldest-versions --req_files='["requirements/integrate.txt"]' - cat requirements/integrate.txt + python .github/assistant.py set-oldest-versions --req_files='["requirements/_integrate.txt"]' + cat requirements/_integrate.txt displayName: "Adjust versions" - bash: | - pip install -q -r requirements/integrate.txt + pip install -q -r requirements/_integrate.txt # force reinstall TM as it could be overwritten by integration's dependencies - pip install . -U -r requirements/test.txt --find-links ${TORCH_URL} + pip install . -U -r requirements/_tests.txt --find-links ${TORCH_URL} displayName: "Install package & integrations" - bash: | diff --git a/.azure/gpu-unittests.yml b/.azure/gpu-unittests.yml index 39713d21790..d1c589aeba6 100644 --- a/.azure/gpu-unittests.yml +++ b/.azure/gpu-unittests.yml @@ -89,7 +89,7 @@ jobs: displayName: "Adjust versions" - bash: | - pip install . -U -r ./requirements/devel.txt --prefer-binary --find-links=${TORCH_URL} + pip install . 
-U -r ./requirements/_devel.txt --prefer-binary --find-links=${TORCH_URL} displayName: "Install environment" - bash: | diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 26ce322d691..f707722c658 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -17,8 +17,8 @@ RUN \ # trying to resolve pesq installation issue pip3 install -q "numpy<1.24" && \ pip3 --disable-pip-version-check --no-cache-dir install \ - -r /tmp/pip-tmp/requirements/devel.txt \ - -r /tmp/pip-tmp/requirements/docs.txt \ + -r /tmp/pip-tmp/requirements/_devel.txt \ + -r /tmp/pip-tmp/requirements/_docs.txt \ --find-links="https://download.pytorch.org/whl/cpu/torch_stable.html" \ --find-links="dist/" && \ rm -rf /tmp/pip-tmp diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 6f19a6ab96a..f170b93c2c1 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -134,7 +134,7 @@ When you send a PR the continuous integration will run tests and build the docs. To setup a local development environment, install both local and test dependencies: ```bash -python -m pip install -r requirements/test.txt +python -m pip install -r requirements/_tests.txt python -m pip install pre-commit ``` diff --git a/.github/actions/push-caches/action.yml b/.github/actions/push-caches/action.yml index f6f79e9b55d..d7ae92abca0 100644 --- a/.github/actions/push-caches/action.yml +++ b/.github/actions/push-caches/action.yml @@ -41,7 +41,7 @@ runs: - name: Dump wheels run: | - pip wheel -r requirements/devel.txt --prefer-binary \ + pip wheel -r requirements/_devel.txt --prefer-binary \ --wheel-dir=.pip-wheels \ -f ${{ inputs.torch-url }} -f ${{ inputs.pypi-dir }} ls -lh .pip-wheels diff --git a/.github/workflows/ci-checks.yml b/.github/workflows/ci-checks.yml index 68c652ba50f..848ea789a35 100644 --- a/.github/workflows/ci-checks.yml +++ b/.github/workflows/ci-checks.yml @@ -15,8 +15,8 @@ jobs: check-code: uses: 
Lightning-AI/utilities/.github/workflows/check-code.yml@v0.9.0 with: - actions-ref: main - extra-typing: typing + actions-ref: v0.9.0 + extra-typing: "typing" check-schema: uses: Lightning-AI/utilities/.github/workflows/check-schema.yml@v0.9.0 @@ -25,7 +25,7 @@ jobs: if: github.event.pull_request.draft == false uses: Lightning-AI/utilities/.github/workflows/check-package.yml@v0.9.0 with: - actions-ref: main + actions-ref: v0.9.0 artifact-name: dist-packages-${{ github.sha }} import-name: "torchmetrics" testing-matrix: | diff --git a/.github/workflows/ci-integrate.yml b/.github/workflows/ci-integrate.yml index 16b295818d3..fc9f05f4ac6 100644 --- a/.github/workflows/ci-integrate.yml +++ b/.github/workflows/ci-integrate.yml @@ -63,7 +63,7 @@ jobs: run: | set -e curl https://raw.githubusercontent.com/Lightning-AI/utilities/main/scripts/adjust-torch-versions.py -o adjust-torch-versions.py - pip install -r requirements/test.txt -r requirements/integrate.txt \ + pip install -r requirements/_tests.txt -r requirements/_integrate.txt \ --find-links $PYTORCH_URL -f $PYPI_CACHE --upgrade-strategy eager python adjust-torch-versions.py requirements/base.txt python adjust-torch-versions.py requirements/image.txt diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index f250defa112..b73982287b9 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -92,7 +92,7 @@ jobs: run: | pip --version pip install -e . 
-U --find-links $PYTORCH_URL -f $PYPI_CACHE - pip install -r requirements/doctest.txt -U -f $PYPI_CACHE + pip install -r requirements/_doctest.txt -U -f $PYPI_CACHE pip list # todo: copy this to install checks @@ -116,7 +116,7 @@ jobs: for fpath in `ls requirements/*.txt`; do python adjust-torch-versions.py $fpath done - pip install --requirement requirements/devel.txt -U \ + pip install --requirement requirements/_devel.txt -U \ --find-links $PYTORCH_URL -f $PYPI_CACHE pip list diff --git a/.github/workflows/docs-build.yml b/.github/workflows/docs-build.yml index 530a1c1dea0..7f8f5eedbcb 100644 --- a/.github/workflows/docs-build.yml +++ b/.github/workflows/docs-build.yml @@ -64,7 +64,7 @@ jobs: sudo apt-get update --fix-missing sudo apt-get install -y cmake pip --version - pip install . -U -r requirements/docs.txt \ + pip install . -U -r requirements/_docs.txt \ --find-links="${PYPI_CACHE}" --find-links="${TORCH_URL}" --find-links="dist/" pip list diff --git a/.readthedocs.yml b/.readthedocs.yml index 215a2506733..e797ae385fe 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -34,7 +34,7 @@ build: - pip install -U pip awscli --user - python -m awscli s3 sync --no-sign-request s3://sphinx-packages/ dist/ ; ls -lh dist/ - > - pip install -e . -q -r requirements/docs.txt \ + pip install -e . -q -r requirements/_docs.txt \ -f 'https://download.pytorch.org/whl/cpu/torch_stable.html' -f dist/ ; pip list # this need to be split so `sphinx-build` is picked from previous installation diff --git a/Makefile b/Makefile index 004f5f5a625..9e4b7029b90 100644 --- a/Makefile +++ b/Makefile @@ -28,12 +28,12 @@ test: clean env data cd tests && python -m coverage report docs: clean - pip install -e . --quiet -r requirements/docs.txt + pip install -e . 
--quiet -r requirements/_docs.txt # apt-get install -y texlive-latex-extra dvipng texlive-pictures texlive-fonts-recommended cm-super TOKENIZERS_PARALLELISM=false python -m sphinx -b html -W --keep-going docs/source docs/build env: - pip install -e . -U -r requirements/devel.txt + pip install -e . -U -r requirements/_devel.txt data: python -c "from urllib.request import urlretrieve ; urlretrieve('https://pl-public-data.s3.amazonaws.com/metrics/data.zip', 'data.zip')" diff --git a/dockers/ubuntu-cuda/Dockerfile b/dockers/ubuntu-cuda/Dockerfile index ea8ab92ac45..2d8b4569b43 100644 --- a/dockers/ubuntu-cuda/Dockerfile +++ b/dockers/ubuntu-cuda/Dockerfile @@ -84,7 +84,7 @@ RUN \ pip install -q "numpy<1.24" && \ CUDA_VERSION_MM=${CUDA_VERSION%.*} && \ CU_VERSION_MM=${CUDA_VERSION_MM//'.'/''} && \ - pip install --no-cache-dir -r requirements/devel.txt \ + pip install --no-cache-dir -r requirements/_devel.txt \ --find-links "https://download.pytorch.org/whl/cu${CU_VERSION_MM}/torch_stable.html" && \ rm -rf requirements/ diff --git a/docs/source/conf.py b/docs/source/conf.py index 31ba5d1b81a..fbf0ba506f7 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -321,7 +321,7 @@ def package_list_from_file(file): MOCK_PACKAGES = [] if SPHINX_MOCK_REQUIREMENTS: # mock also base packages when we are on RTD since we don't install them there - MOCK_PACKAGES += package_list_from_file(os.path.join(_PATH_ROOT, "requirements", "docs.txt")) + MOCK_PACKAGES += package_list_from_file(os.path.join(_PATH_ROOT, "requirements", "_docs.txt")) MOCK_PACKAGES = [PACKAGE_MAPPING.get(pkg, pkg) for pkg in MOCK_PACKAGES] autodoc_mock_imports = MOCK_PACKAGES diff --git a/requirements/README.md b/requirements/README.md new file mode 100644 index 00000000000..2e6f9cd3c11 --- /dev/null +++ b/requirements/README.md @@ -0,0 +1,28 @@ +# Project Requirements + +This folder contains all requirements files for the project. The base requirements are located in the `base.txt` file. 
+Files prefixed with `_` are only meant for development and testing purposes. In general, each subdomain of the project +has a `.txt` file that contains the necessary requirements for using that subdomain and a `_test.txt` +file that contains the necessary requirements for testing that subdomain. + +To install all extra requirements such that all tests can be run, use the following command: + +```bash +pip install -r requirements/_devel.txt # unittests +pip install -r requirements/_integrate.txt # integration tests +``` + +To install all extra requirements so that the documentation can be built, use the following command: + +```bash +pip install -r requirements/_docs.txt +# OR just run `make docs` +``` + +## CI/CD upper bounds automation + +For CI stability, we have set upper bounds (the latest version) for all package versions, so with any sudden release, +we won't put our development on fire. Dependabot manages the continuous updates of these upper bounds. +Note that these upper bounds are lifted when installing a package from the source or as a package. +If you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment. 
diff --git a/requirements/devel.txt b/requirements/_devel.txt similarity index 95% rename from requirements/devel.txt rename to requirements/_devel.txt index a6cb48591ca..6a80916918a 100644 --- a/requirements/devel.txt +++ b/requirements/_devel.txt @@ -2,7 +2,7 @@ -r base.txt # add the testing dependencies --r test.txt +-r _tests.txt # add extra requirements -r image.txt diff --git a/requirements/docs.txt b/requirements/_docs.txt similarity index 96% rename from requirements/docs.txt rename to requirements/_docs.txt index fbb656f864f..93eacf64bb5 100644 --- a/requirements/docs.txt +++ b/requirements/_docs.txt @@ -16,7 +16,7 @@ lightning-utilities >=0.9.0, <0.10.0 pydantic > 1.0.0, < 3.0.0 # integrations --r integrate.txt +-r _integrate.txt -r visual.txt -r audio.txt -r detection.txt diff --git a/requirements/doctest.txt b/requirements/_doctest.txt similarity index 100% rename from requirements/doctest.txt rename to requirements/_doctest.txt diff --git a/requirements/integrate.txt b/requirements/_integrate.txt similarity index 100% rename from requirements/integrate.txt rename to requirements/_integrate.txt diff --git a/requirements/test.txt b/requirements/_tests.txt similarity index 100% rename from requirements/test.txt rename to requirements/_tests.txt diff --git a/setup.py b/setup.py index 65f457da2d4..968c32d65c3 100755 --- a/setup.py +++ b/setup.py @@ -161,13 +161,21 @@ def _load_py_module(fname: str, pkg: str = "torchmetrics"): BASE_REQUIREMENTS = _load_requirements(path_dir=_PATH_REQUIRE, file_name="base.txt") -def _prepare_extras( - skip_files: Tuple[str] = ("base.txt", "devel.txt", "doctest.txt", "integrate.txt", "docs.txt") -) -> dict: +def _prepare_extras(skip_pattern: str = "^_", skip_files: Tuple[str] = ("base.txt",)) -> dict: + """Preparing extras for the package listing requirements. 
+ + Args: + skip_pattern: ignore files with this pattern, by default all files starting with _ + skip_files: ignore some additional files, by default base requirements + + Note: particular domain test requirements are aggregated in a single "_tests" extra (which is not accessible). + + """ # find all extra requirements _load_req = partial(_load_requirements, path_dir=_PATH_REQUIRE) found_req_files = sorted(os.path.basename(p) for p in glob.glob(os.path.join(_PATH_REQUIRE, "*.txt"))) # filter unwanted files + found_req_files = [n for n in found_req_files if not re.match(skip_pattern, n)] found_req_files = [n for n in found_req_files if n not in skip_files] found_req_names = [os.path.splitext(req)[0] for req in found_req_files] # define basic and extra extras