diff --git a/.azure-pipelines/azure-pipelines-linux.yml b/.azure-pipelines/azure-pipelines-linux.yml
index ba3d0e95..8588ed8e 100755
--- a/.azure-pipelines/azure-pipelines-linux.yml
+++ b/.azure-pipelines/azure-pipelines-linux.yml
@@ -12,63 +12,79 @@ jobs:
         CONFIG: linux_64_python3.10.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_64_python3.10.____cpython
       linux_64_python3.11.____cpython:
         CONFIG: linux_64_python3.11.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_64_python3.11.____cpython
       linux_64_python3.8.____cpython:
         CONFIG: linux_64_python3.8.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_64_python3.8.____cpython
       linux_64_python3.9.____73_pypy:
         CONFIG: linux_64_python3.9.____73_pypy
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_64_python3.9.____73_pypy
       linux_64_python3.9.____cpython:
         CONFIG: linux_64_python3.9.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_64_python3.9.____cpython
       linux_aarch64_python3.10.____cpython:
         CONFIG: linux_aarch64_python3.10.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_aarch64_python3.10.____cpython
       linux_aarch64_python3.11.____cpython:
         CONFIG: linux_aarch64_python3.11.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_aarch64_python3.11.____cpython
       linux_aarch64_python3.8.____cpython:
         CONFIG: linux_aarch64_python3.8.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_aarch64_python3.8.____cpython
       linux_aarch64_python3.9.____73_pypy:
         CONFIG: linux_aarch64_python3.9.____73_pypy
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_aarch64_python3.9.____73_pypy
       linux_aarch64_python3.9.____cpython:
         CONFIG: linux_aarch64_python3.9.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_aarch64_python3.9.____cpython
       linux_ppc64le_python3.10.____cpython:
         CONFIG: linux_ppc64le_python3.10.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_ppc64le_python3.10.____cpython
       linux_ppc64le_python3.11.____cpython:
         CONFIG: linux_ppc64le_python3.11.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_ppc64le_python3.11.____cpython
       linux_ppc64le_python3.8.____cpython:
         CONFIG: linux_ppc64le_python3.8.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_ppc64le_python3.8.____cpython
       linux_ppc64le_python3.9.____73_pypy:
         CONFIG: linux_ppc64le_python3.9.____73_pypy
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_ppc64le_python3.9.____73_pypy
       linux_ppc64le_python3.9.____cpython:
         CONFIG: linux_ppc64le_python3.9.____cpython
         UPLOAD_PACKAGES: 'True'
         DOCKER_IMAGE: quay.io/condaforge/linux-anvil-cos7-x86_64
+        SHORT_CONFIG: linux_ppc64le_python3.9.____cpython
   timeoutInMinutes: 360
+  variables: {}
 
   steps:
   # configure qemu binfmt-misc running. This allows us to run docker containers
@@ -96,4 +112,33 @@ jobs:
     env:
       BINSTAR_TOKEN: $(BINSTAR_TOKEN)
       FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN)
-      STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN)
\ No newline at end of file
+      STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN)
+  - script: |
+        export CI=azure
+        export CI_RUN_ID=$(build.BuildNumber).$(system.JobAttempt)
+        export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME})
+        export CONDA_BLD_DIR=build_artifacts
+        export ARTIFACT_STAGING_DIR="$(Build.ArtifactStagingDirectory)"
+        # Archive everything in CONDA_BLD_DIR except environments
+        export BLD_ARTIFACT_PREFIX=conda_artifacts
+        if [[ "$AGENT_JOBSTATUS" == "Failed" ]]; then
+          # Archive the CONDA_BLD_DIR environments only when the job fails
+          export ENV_ARTIFACT_PREFIX=conda_envs
+        fi
+        ./.scripts/create_conda_build_artifacts.sh
+    displayName: Prepare conda build artifacts
+    condition: succeededOrFailed()
+
+  - task: PublishPipelineArtifact@1
+    displayName: Store conda build artifacts
+    condition: not(eq(variables.BLD_ARTIFACT_PATH, ''))
+    inputs:
+      targetPath: $(BLD_ARTIFACT_PATH)
+      artifactName: $(BLD_ARTIFACT_NAME)
+
+  - task: PublishPipelineArtifact@1
+    displayName: Store conda build environment artifacts
+    condition: not(eq(variables.ENV_ARTIFACT_PATH, ''))
+    inputs:
+      targetPath: $(ENV_ARTIFACT_PATH)
+      artifactName: $(ENV_ARTIFACT_NAME)
\ No newline at end of file
diff --git a/.azure-pipelines/azure-pipelines-osx.yml b/.azure-pipelines/azure-pipelines-osx.yml
index 35c22fb6..178475a6 100755
--- a/.azure-pipelines/azure-pipelines-osx.yml
+++ b/.azure-pipelines/azure-pipelines-osx.yml
@@ -11,31 +11,41 @@ jobs:
       osx_64_python3.10.____cpython:
         CONFIG: osx_64_python3.10.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_64_python3.10.____cpython
       osx_64_python3.11.____cpython:
         CONFIG: osx_64_python3.11.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_64_python3.11.____cpython
       osx_64_python3.8.____cpython:
         CONFIG: osx_64_python3.8.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_64_python3.8.____cpython
       osx_64_python3.9.____73_pypy:
         CONFIG: osx_64_python3.9.____73_pypy
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_64_python3.9.____73_pypy
       osx_64_python3.9.____cpython:
         CONFIG: osx_64_python3.9.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_64_python3.9.____cpython
       osx_arm64_python3.10.____cpython:
         CONFIG: osx_arm64_python3.10.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_arm64_python3.10.____cpython
       osx_arm64_python3.11.____cpython:
         CONFIG: osx_arm64_python3.11.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_arm64_python3.11.____cpython
       osx_arm64_python3.8.____cpython:
         CONFIG: osx_arm64_python3.8.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_arm64_python3.8.____cpython
       osx_arm64_python3.9.____cpython:
         CONFIG: osx_arm64_python3.9.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: osx_arm64_python3.9.____cpython
   timeoutInMinutes: 360
+  variables: {}
 
   steps:
   # TODO: Fast finish on azure pipelines?
@@ -57,4 +67,33 @@ jobs:
     env:
      BINSTAR_TOKEN: $(BINSTAR_TOKEN)
      FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN)
-      STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN)
\ No newline at end of file
+      STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN)
+  - script: |
+        export CI=azure
+        export CI_RUN_ID=$(build.BuildNumber).$(system.JobAttempt)
+        export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME})
+        export CONDA_BLD_DIR=/Users/runner/miniforge3/conda-bld
+        export ARTIFACT_STAGING_DIR="$(Build.ArtifactStagingDirectory)"
+        # Archive everything in CONDA_BLD_DIR except environments
+        export BLD_ARTIFACT_PREFIX=conda_artifacts
+        if [[ "$AGENT_JOBSTATUS" == "Failed" ]]; then
+          # Archive the CONDA_BLD_DIR environments only when the job fails
+          export ENV_ARTIFACT_PREFIX=conda_envs
+        fi
+        ./.scripts/create_conda_build_artifacts.sh
+    displayName: Prepare conda build artifacts
+    condition: succeededOrFailed()
+
+  - task: PublishPipelineArtifact@1
+    displayName: Store conda build artifacts
+    condition: not(eq(variables.BLD_ARTIFACT_PATH, ''))
+    inputs:
+      targetPath: $(BLD_ARTIFACT_PATH)
+      artifactName: $(BLD_ARTIFACT_NAME)
+
+  - task: PublishPipelineArtifact@1
+    displayName: Store conda build environment artifacts
+    condition: not(eq(variables.ENV_ARTIFACT_PATH, ''))
+    inputs:
+      targetPath: $(ENV_ARTIFACT_PATH)
+      artifactName: $(ENV_ARTIFACT_NAME)
\ No newline at end of file
diff --git a/.azure-pipelines/azure-pipelines-win.yml b/.azure-pipelines/azure-pipelines-win.yml
index 61c70c6e..a3935b5e 100755
--- a/.azure-pipelines/azure-pipelines-win.yml
+++ b/.azure-pipelines/azure-pipelines-win.yml
@@ -11,18 +11,23 @@ jobs:
       win_64_python3.10.____cpython:
         CONFIG: win_64_python3.10.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: win_64_python3.10.____cpython
       win_64_python3.11.____cpython:
         CONFIG: win_64_python3.11.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: win_64_python3.11.____cpython
       win_64_python3.8.____cpython:
         CONFIG: win_64_python3.8.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: win_64_python3.8.____cpython
       win_64_python3.9.____73_pypy:
         CONFIG: win_64_python3.9.____73_pypy
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: win_64_python3.9.____73_pypy
       win_64_python3.9.____cpython:
         CONFIG: win_64_python3.9.____cpython
         UPLOAD_PACKAGES: 'True'
+        SHORT_CONFIG: win_64_python3.9.____cpython
   timeoutInMinutes: 360
   variables:
     CONDA_BLD_PATH: D:\\bld\\
@@ -61,4 +66,31 @@ jobs:
     env:
      UPLOAD_TEMP: $(UPLOAD_TEMP)
      BINSTAR_TOKEN: $(BINSTAR_TOKEN)
      FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN)
-      STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN)
\ No newline at end of file
+      STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN)
+  - script: |
+        set CI=azure
+        set CI_RUN_ID=$(build.BuildNumber).$(system.JobAttempt)
+        set FEEDSTOCK_NAME=$(build.Repository.Name)
+        set ARTIFACT_STAGING_DIR=$(Build.ArtifactStagingDirectory)
+        set CONDA_BLD_DIR=$(CONDA_BLD_PATH)
+        set BLD_ARTIFACT_PREFIX=conda_artifacts
+        if "%AGENT_JOBSTATUS%" == "Failed" (
+            set ENV_ARTIFACT_PREFIX=conda_envs
+        )
+        call ".scripts\create_conda_build_artifacts.bat"
+    displayName: Prepare conda build artifacts
+    condition: succeededOrFailed()
+
+  - task: PublishPipelineArtifact@1
+    displayName: Store conda build artifacts
+    condition: not(eq(variables.BLD_ARTIFACT_PATH, ''))
+    inputs:
+      targetPath: $(BLD_ARTIFACT_PATH)
+      artifactName: $(BLD_ARTIFACT_NAME)
+
+  - task: PublishPipelineArtifact@1
+    displayName: Store conda build environment artifacts
+    condition: not(eq(variables.ENV_ARTIFACT_PATH, ''))
+    inputs:
+      targetPath: $(ENV_ARTIFACT_PATH)
+      artifactName: $(ENV_ARTIFACT_NAME)
\ No newline at end of file
diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh
index beda247e..899ba03c 100755
--- a/.scripts/build_steps.sh
+++ b/.scripts/build_steps.sh
@@ -34,9 +34,9 @@ CONDARC
 export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1
 
 mamba install --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \
-    pip mamba conda-build boa conda-forge-ci-setup=4
+    pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1"
 mamba update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \
-    pip mamba conda-build boa conda-forge-ci-setup=4
+    pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1"
 
 # set up the condarc
 setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
@@ -68,7 +68,7 @@ if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then
     # Drop into an interactive shell
     /bin/bash
 else
-    conda mambabuild "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
+    conda-build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
         --suppress-variables ${EXTRA_CB_OPTIONS:-} \
         --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" \
         --extra-meta flow_run_id="${flow_run_id:-}" remote_url="${remote_url:-}" sha="${sha:-}"
diff --git a/.scripts/create_conda_build_artifacts.bat b/.scripts/create_conda_build_artifacts.bat
new file mode 100755
index 00000000..2853cfdc
--- /dev/null
+++ b/.scripts/create_conda_build_artifacts.bat
@@ -0,0 +1,80 @@
+setlocal enableextensions enabledelayedexpansion
+
+rem INPUTS (environment variables that need to be set before calling this script):
+rem
+rem CI (azure/github_actions/UNSET)
+rem CI_RUN_ID (unique identifier for the CI job run)
+rem FEEDSTOCK_NAME
+rem CONFIG (build matrix configuration string)
+rem SHORT_CONFIG (uniquely-shortened configuration string)
+rem CONDA_BLD_DIR (path to the conda-bld directory)
+rem ARTIFACT_STAGING_DIR (use working directory if unset)
+rem BLD_ARTIFACT_PREFIX (prefix for the conda build artifact name, skip if unset)
+rem ENV_ARTIFACT_PREFIX (prefix for the conda build environments artifact name, skip if unset)
+
+rem OUTPUTS
+rem
+rem BLD_ARTIFACT_NAME
+rem BLD_ARTIFACT_PATH
+rem ENV_ARTIFACT_NAME
+rem ENV_ARTIFACT_PATH
+
+rem Check that the conda-build directory exists
+if not exist %CONDA_BLD_DIR% (
+    echo conda-build directory does not exist
+    exit 1
+)
+
+if not defined ARTIFACT_STAGING_DIR (
+    rem Set staging dir to the working dir
+    set ARTIFACT_STAGING_DIR=%cd%
+)
+
+rem Set a unique ID for the artifact(s), specialized for this particular job run
+set ARTIFACT_UNIQUE_ID=%CI_RUN_ID%_%CONFIG%
+if not "%ARTIFACT_UNIQUE_ID%" == "%ARTIFACT_UNIQUE_ID:~0,80%" (
+    set ARTIFACT_UNIQUE_ID=%CI_RUN_ID%_%SHORT_CONFIG%
+)
+
+rem Set a descriptive ID for the archive(s), specialized for this particular job run
+set ARCHIVE_UNIQUE_ID=%CI_RUN_ID%_%CONFIG%
+
+rem Make the build artifact zip
+if defined BLD_ARTIFACT_PREFIX (
+    set BLD_ARTIFACT_NAME=%BLD_ARTIFACT_PREFIX%_%ARTIFACT_UNIQUE_ID%
+    echo BLD_ARTIFACT_NAME: !BLD_ARTIFACT_NAME!
+
+    set "BLD_ARTIFACT_PATH=%ARTIFACT_STAGING_DIR%\%FEEDSTOCK_NAME%_%BLD_ARTIFACT_PREFIX%_%ARCHIVE_UNIQUE_ID%.zip"
+    7z a "!BLD_ARTIFACT_PATH!" "%CONDA_BLD_DIR%" -xr^^!.git/ -xr^^!_*_env*/ -xr^^!*_cache/ -bb
+    if errorlevel 1 exit 1
+    echo BLD_ARTIFACT_PATH: !BLD_ARTIFACT_PATH!
+
+    if "%CI%" == "azure" (
+        echo ##vso[task.setVariable variable=BLD_ARTIFACT_NAME]!BLD_ARTIFACT_NAME!
+        echo ##vso[task.setVariable variable=BLD_ARTIFACT_PATH]!BLD_ARTIFACT_PATH!
+    )
+    if "%CI%" == "github_actions" (
+        echo BLD_ARTIFACT_NAME=!BLD_ARTIFACT_NAME!>> !GITHUB_OUTPUT!
+        echo BLD_ARTIFACT_PATH=!BLD_ARTIFACT_PATH!>> !GITHUB_OUTPUT!
+    )
+)
+
+rem Make the environments artifact zip
+if defined ENV_ARTIFACT_PREFIX (
+    set ENV_ARTIFACT_NAME=!ENV_ARTIFACT_PREFIX!_%ARTIFACT_UNIQUE_ID%
+    echo ENV_ARTIFACT_NAME: !ENV_ARTIFACT_NAME!
+
+    set "ENV_ARTIFACT_PATH=%ARTIFACT_STAGING_DIR%\%FEEDSTOCK_NAME%_%ENV_ARTIFACT_PREFIX%_%ARCHIVE_UNIQUE_ID%.zip"
+    7z a "!ENV_ARTIFACT_PATH!" -r "%CONDA_BLD_DIR%"/_*_env*/ -bb
+    if errorlevel 1 exit 1
+    echo ENV_ARTIFACT_PATH: !ENV_ARTIFACT_PATH!
+
+    if "%CI%" == "azure" (
+        echo ##vso[task.setVariable variable=ENV_ARTIFACT_NAME]!ENV_ARTIFACT_NAME!
+        echo ##vso[task.setVariable variable=ENV_ARTIFACT_PATH]!ENV_ARTIFACT_PATH!
+    )
+    if "%CI%" == "github_actions" (
+        echo ENV_ARTIFACT_NAME=!ENV_ARTIFACT_NAME!>> !GITHUB_OUTPUT!
+        echo ENV_ARTIFACT_PATH=!ENV_ARTIFACT_PATH!>> !GITHUB_OUTPUT!
+    )
+)
\ No newline at end of file
diff --git a/.scripts/create_conda_build_artifacts.sh b/.scripts/create_conda_build_artifacts.sh
new file mode 100755
index 00000000..17ec0868
--- /dev/null
+++ b/.scripts/create_conda_build_artifacts.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+
+# INPUTS (environment variables that need to be set before calling this script):
+#
+# CI (azure/github_actions/UNSET)
+# CI_RUN_ID (unique identifier for the CI job run)
+# FEEDSTOCK_NAME
+# CONFIG (build matrix configuration string)
+# SHORT_CONFIG (uniquely-shortened configuration string)
+# CONDA_BLD_DIR (path to the conda-bld directory)
+# ARTIFACT_STAGING_DIR (use working directory if unset)
+# BLD_ARTIFACT_PREFIX (prefix for the conda build artifact name, skip if unset)
+# ENV_ARTIFACT_PREFIX (prefix for the conda build environments artifact name, skip if unset)
+
+# OUTPUTS
+#
+# BLD_ARTIFACT_NAME
+# BLD_ARTIFACT_PATH
+# ENV_ARTIFACT_NAME
+# ENV_ARTIFACT_PATH
+
+source .scripts/logging_utils.sh
+
+# DON'T do set -x, because it results in double echo-ing pipeline commands
+# and that might end up inserting extraneous quotation marks in output variables
+set -e
+
+# Check that the conda-build directory exists
+if [ ! -d "$CONDA_BLD_DIR" ]; then
+    echo "conda-build directory does not exist"
+    exit 1
+fi
+
+# Set staging dir to the working dir, in Windows style if applicable
+if [[ -z "${ARTIFACT_STAGING_DIR}" ]]; then
+    if pwd -W; then
+        ARTIFACT_STAGING_DIR=$(pwd -W)
+    else
+        ARTIFACT_STAGING_DIR=$PWD
+    fi
+fi
+echo "ARTIFACT_STAGING_DIR: $ARTIFACT_STAGING_DIR"
+
+FEEDSTOCK_ROOT=$(cd "$(dirname "$0")/.."; pwd;)
+if [ -z ${FEEDSTOCK_NAME} ]; then
+    export FEEDSTOCK_NAME=$(basename ${FEEDSTOCK_ROOT})
+fi
+
+# Set a unique ID for the artifact(s), specialized for this particular job run
+ARTIFACT_UNIQUE_ID="${CI_RUN_ID}_${CONFIG}"
+if [[ ${#ARTIFACT_UNIQUE_ID} -gt 80 ]]; then
+    ARTIFACT_UNIQUE_ID="${CI_RUN_ID}_${SHORT_CONFIG}"
+fi
+echo "ARTIFACT_UNIQUE_ID: $ARTIFACT_UNIQUE_ID"
+
+# Set a descriptive ID for the archive(s), specialized for this particular job run
+ARCHIVE_UNIQUE_ID="${CI_RUN_ID}_${CONFIG}"
+
+# Make the build artifact zip
+if [[ ! -z "$BLD_ARTIFACT_PREFIX" ]]; then
+    export BLD_ARTIFACT_NAME="${BLD_ARTIFACT_PREFIX}_${ARTIFACT_UNIQUE_ID}"
+    export BLD_ARTIFACT_PATH="${ARTIFACT_STAGING_DIR}/${FEEDSTOCK_NAME}_${BLD_ARTIFACT_PREFIX}_${ARCHIVE_UNIQUE_ID}.zip"
+
+    ( startgroup "Archive conda build directory" ) 2> /dev/null
+
+    # Try 7z and fall back to zip if it fails (for cross-platform use)
+    if ! 7z a "$BLD_ARTIFACT_PATH" "$CONDA_BLD_DIR" '-xr!.git/' '-xr!_*_env*/' '-xr!*_cache/' -bb; then
+        pushd "$CONDA_BLD_DIR"
+        zip -r -y -T "$BLD_ARTIFACT_PATH" . -x '*.git/*' '*_*_env*/*' '*_cache/*'
+        popd
+    fi
+
+    ( endgroup "Archive conda build directory" ) 2> /dev/null
+
+    echo "BLD_ARTIFACT_NAME: $BLD_ARTIFACT_NAME"
+    echo "BLD_ARTIFACT_PATH: $BLD_ARTIFACT_PATH"
+
+    if [[ "$CI" == "azure" ]]; then
+        echo "##vso[task.setVariable variable=BLD_ARTIFACT_NAME]$BLD_ARTIFACT_NAME"
+        echo "##vso[task.setVariable variable=BLD_ARTIFACT_PATH]$BLD_ARTIFACT_PATH"
+    elif [[ "$CI" == "github_actions" ]]; then
+        echo "BLD_ARTIFACT_NAME=$BLD_ARTIFACT_NAME" >> $GITHUB_OUTPUT
+        echo "BLD_ARTIFACT_PATH=$BLD_ARTIFACT_PATH" >> $GITHUB_OUTPUT
+    fi
+fi
+
+# Make the environments artifact zip
+if [[ ! -z "$ENV_ARTIFACT_PREFIX" ]]; then
+    export ENV_ARTIFACT_NAME="${ENV_ARTIFACT_PREFIX}_${ARTIFACT_UNIQUE_ID}"
+    export ENV_ARTIFACT_PATH="${ARTIFACT_STAGING_DIR}/${FEEDSTOCK_NAME}_${ENV_ARTIFACT_PREFIX}_${ARCHIVE_UNIQUE_ID}.zip"
+
+    ( startgroup "Archive conda build environments" ) 2> /dev/null
+
+    # Try 7z and fall back to zip if it fails (for cross-platform use)
+    if ! 7z a "$ENV_ARTIFACT_PATH" -r "$CONDA_BLD_DIR"/'_*_env*/' -bb; then
+        pushd "$CONDA_BLD_DIR"
+        zip -r -y -T "$ENV_ARTIFACT_PATH" . -i '*_*_env*/*'
+        popd
+    fi
+
+    ( endgroup "Archive conda build environments" ) 2> /dev/null
+
+    echo "ENV_ARTIFACT_NAME: $ENV_ARTIFACT_NAME"
+    echo "ENV_ARTIFACT_PATH: $ENV_ARTIFACT_PATH"
+
+    if [[ "$CI" == "azure" ]]; then
+        echo "##vso[task.setVariable variable=ENV_ARTIFACT_NAME]$ENV_ARTIFACT_NAME"
+        echo "##vso[task.setVariable variable=ENV_ARTIFACT_PATH]$ENV_ARTIFACT_PATH"
+    elif [[ "$CI" == "github_actions" ]]; then
+        echo "ENV_ARTIFACT_NAME=$ENV_ARTIFACT_NAME" >> $GITHUB_OUTPUT
+        echo "ENV_ARTIFACT_PATH=$ENV_ARTIFACT_PATH" >> $GITHUB_OUTPUT
+    fi
+fi
\ No newline at end of file
diff --git a/.scripts/run_osx_build.sh b/.scripts/run_osx_build.sh
index 9259eb93..07dff219 100755
--- a/.scripts/run_osx_build.sh
+++ b/.scripts/run_osx_build.sh
@@ -26,9 +26,9 @@ export CONDA_SOLVER="libmamba"
 export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1
 
 mamba install --update-specs --quiet --yes --channel conda-forge --strict-channel-priority \
-    pip mamba conda-build boa conda-forge-ci-setup=4
+    pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1"
 mamba update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \
-    pip mamba conda-build boa conda-forge-ci-setup=4
+    pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1"
 
 
 
@@ -81,7 +81,7 @@ else
     EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --no-test"
 fi
 
-conda mambabuild ./recipe -m ./.ci_support/${CONFIG}.yaml \
+conda-build ./recipe -m ./.ci_support/${CONFIG}.yaml \
     --suppress-variables ${EXTRA_CB_OPTIONS:-} \
     --clobber-file ./.ci_support/clobber_${CONFIG}.yaml \
     --extra-meta flow_run_id="$flow_run_id" remote_url="$remote_url" sha="$sha"
diff --git a/.scripts/run_win_build.bat b/.scripts/run_win_build.bat
index 48734de9..6d546976 100755
--- a/.scripts/run_win_build.bat
+++ b/.scripts/run_win_build.bat
@@ -24,7 +24,7 @@ set "CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1"
 
 :: Provision the necessary dependencies to build the recipe later
 echo Installing dependencies
-mamba.exe install "python=3.10" pip mamba conda-build boa conda-forge-ci-setup=4 -c conda-forge --strict-channel-priority --yes
+mamba.exe install "python=3.10" pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" -c conda-forge --strict-channel-priority --yes
 if !errorlevel! neq 0 exit /b !errorlevel!
 
 :: Set basic configuration
@@ -55,7 +55,7 @@ call :end_group
 
 :: Build the recipe
 echo Building recipe
-conda.exe mambabuild "recipe" -m .ci_support\%CONFIG%.yaml --suppress-variables %EXTRA_CB_OPTIONS%
+conda-build.exe "recipe" -m .ci_support\%CONFIG%.yaml --suppress-variables %EXTRA_CB_OPTIONS%
 if !errorlevel! neq 0 exit /b !errorlevel!
 
 :: Prepare some environment variables for the upload step
diff --git a/README.md b/README.md
old mode 100755
new mode 100644
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
old mode 100755
new mode 100644
index 6b346f50..e5306da9
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -4,5 +4,5 @@
 
 jobs:
   - template: ./.azure-pipelines/azure-pipelines-linux.yml
-  - template: ./.azure-pipelines/azure-pipelines-win.yml
-  - template: ./.azure-pipelines/azure-pipelines-osx.yml
\ No newline at end of file
+  - template: ./.azure-pipelines/azure-pipelines-osx.yml
+  - template: ./.azure-pipelines/azure-pipelines-win.yml
\ No newline at end of file
diff --git a/build-locally.py b/build-locally.py
index 3f4b7a79..e0d408d0 100755
--- a/build-locally.py
+++ b/build-locally.py
@@ -64,8 +64,9 @@ def verify_config(ns):
     elif ns.config.startswith("osx"):
         if "OSX_SDK_DIR" not in os.environ:
             raise RuntimeError(
-                "Need OSX_SDK_DIR env variable set. Run 'export OSX_SDK_DIR=SDKs' "
-                "to download the SDK automatically to 'SDKs/MacOSX.sdk'. "
+                "Need OSX_SDK_DIR env variable set. Run 'export OSX_SDK_DIR=$PWD/SDKs' "
+                "to download the SDK automatically to '$PWD/SDKs/MacOSX.sdk'. "
+                "Note: OSX_SDK_DIR must be set to an absolute path. "
                 "Setting this variable implies agreement to the licensing terms of the SDK by Apple."
             )
 
diff --git a/conda-forge.yml b/conda-forge.yml
index 111e9100..218795f9 100644
--- a/conda-forge.yml
+++ b/conda-forge.yml
@@ -1,3 +1,5 @@
+azure:
+  store_build_artifacts: true
 build_platform:
   linux_aarch64: linux_64
   linux_ppc64le: linux_64
@@ -11,5 +13,4 @@ provider:
   linux_ppc64le: default
 conda_build:
   pkg_format: '2'
-remote-ci-setup: [conda-forge-ci-setup=3, packaging]  # temporary workaround
 test: native_and_emulated
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index 25187b60..cf01d7af 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -9,10 +9,13 @@ source:
   url: https://github.com/conda/{{ name }}/archive/refs/tags/{{ version }}.tar.gz
   sha256: c534d2cccfba8d8bab85ae54cd02ffc1c02037dbbea394dda3b55b668ab35f1f
   patches:
-    - patches/gh4867.patch
+    # backport https://github.com/conda/conda-build/pull/4867
+    - patches/0001-add-warning-and-return-empty-string.patch
+    # backport https://github.com/conda/conda-build/pull/5195
+    - patches/0002-Fix-stdlib-being-recognized-in-variant-hash-inputs-5.patch
 
 build:
-  number: 0
+  number: 1
   script: {{ PYTHON }} -m pip install . --no-deps --no-build-isolation -vv
   entry_points:
     - conda-build = conda_build.cli.main_build:execute
diff --git a/recipe/patches/gh4867.patch b/recipe/patches/0001-add-warning-and-return-empty-string.patch
similarity index 76%
rename from recipe/patches/gh4867.patch
rename to recipe/patches/0001-add-warning-and-return-empty-string.patch
index c250b41e..14389b0f 100644
--- a/recipe/patches/gh4867.patch
+++ b/recipe/patches/0001-add-warning-and-return-empty-string.patch
@@ -1,14 +1,16 @@
-From 74c48facd64e9c682e9387b186ef0b08a9b52548 Mon Sep 17 00:00:00 2001
+From 8be2f4af1a0df2505df81319ce151953b622e952 Mon Sep 17 00:00:00 2001
 From: Finn Womack
 Date: Thu, 20 Apr 2023 13:58:00 -0700
-Subject: [PATCH 1/8] add warning and return empty string
+Subject: [PATCH 1/2] add warning and return empty string
 
 ---
- conda_build/windows.py | 5 +++++
- 1 file changed, 5 insertions(+)
+ conda_build/windows.py | 3 +++
+ news/4867-arm64-msvc-env-cmd-no-op | 19 +++++++++++++++++++
+ 2 files changed, 22 insertions(+)
+ create mode 100644 news/4867-arm64-msvc-env-cmd-no-op
 
 diff --git a/conda_build/windows.py b/conda_build/windows.py
-index 84da4a0f..1639c554 100644
+index ba53abf8..e3828d1e 100644
 --- a/conda_build/windows.py
 +++ b/conda_build/windows.py
 @@ -110,6 +110,9 @@ def msvc_env_cmd(bits, config, override=None):
diff --git a/recipe/patches/0002-Fix-stdlib-being-recognized-in-variant-hash-inputs-5.patch b/recipe/patches/0002-Fix-stdlib-being-recognized-in-variant-hash-inputs-5.patch
new file mode 100644
index 00000000..97343fd0
--- /dev/null
+++ b/recipe/patches/0002-Fix-stdlib-being-recognized-in-variant-hash-inputs-5.patch
@@ -0,0 +1,128 @@
+From f3ff2a6ef575363e919d3249b8b3e9e8429186ee Mon Sep 17 00:00:00 2001
+From: Marcel Bargull
+Date: Wed, 28 Feb 2024 17:06:00 +0100
+Subject: [PATCH 2/2] Fix stdlib being recognized in variant hash inputs
+ (#5195)
+
+* Test stdlib is recognized in variant hash inputs
+* Fix stdlib being recognized in variant hash inputs
+* Test c_stdlib* inclusion in Metadata.get_used_vars
+
+This function is used downstream in conda-forge's conda-smithy, so let's
+test against this explicitly, too.
+
+---------
+
+Signed-off-by: Marcel Bargull
+Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
+---
+ conda_build/variants.py | 18 ++++++++++--------
+ news/5195-fix-stdlib-variant | 19 +++++++++++++++++++
+ tests/test_metadata.py | 19 ++++++++++++-------
+ 3 files changed, 41 insertions(+), 15 deletions(-)
+ create mode 100644 news/5195-fix-stdlib-variant
+
+diff --git a/conda_build/variants.py b/conda_build/variants.py
+index d798a6e7..2ece5f4b 100644
+--- a/conda_build/variants.py
++++ b/conda_build/variants.py
+@@ -727,15 +727,17 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False):
+     recipe_lines = recipe_text.splitlines()
+     for v in variant:
+         all_res = []
+-        compiler_match = re.match(r"(.*?)_compiler(_version)?$", v)
+-        if compiler_match and not selectors_only:
+-            compiler_lang = compiler_match.group(1)
+-            compiler_regex = r"\{\s*compiler\([\'\"]%s[\"\'][^\{]*?\}" % re.escape(
+-                compiler_lang
++        target_match = re.match(r"(.*?)_(compiler|stdlib)(_version)?$", v)
++        if target_match and not selectors_only:
++            target_lang = target_match.group(1)
++            target_kind = target_match.group(2)
++            target_lang_regex = re.escape(target_lang)
++            target_regex = (
++                rf"\{{\s*{target_kind}\([\'\"]{target_lang_regex}[\"\'][^\{{]*?\}}"
+             )
+-            all_res.append(compiler_regex)
++            all_res.append(target_regex)
+             variant_lines = [
+-                line for line in recipe_lines if v in line or compiler_lang in line
++                line for line in recipe_lines if v in line or target_lang in line
+             ]
+         else:
+             variant_lines = [
+@@ -760,7 +762,7 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False):
+         all_res = r"|".join(all_res)
+         if any(re.search(all_res, line) for line in variant_lines):
+             used_variables.add(v)
+-        if v in ("c_compiler", "cxx_compiler"):
++        if v in ("c_stdlib", "c_compiler", "cxx_compiler"):
+             if "CONDA_BUILD_SYSROOT" in variant:
+                 used_variables.add("CONDA_BUILD_SYSROOT")
+     return used_variables
+diff --git a/news/5195-fix-stdlib-variant b/news/5195-fix-stdlib-variant
+new file mode 100644
+index 00000000..526692f2
+--- /dev/null
++++ b/news/5195-fix-stdlib-variant
+@@ -0,0 +1,19 @@
++### Enhancements
++
++*
++
++### Bug fixes
++
++* Fix stdlib being recognized in variant hash inputs. (#5190 via #5195)
++
++### Deprecations
++
++*
++
++### Docs
++
++*
++
++### Other
++
++*
+diff --git a/tests/test_metadata.py b/tests/test_metadata.py
+index 05e67b54..e89cb5fe 100644
+--- a/tests/test_metadata.py
++++ b/tests/test_metadata.py
+@@ -230,16 +230,16 @@ def test_compiler_metadata_cross_compiler():
+
+
+ @pytest.mark.parametrize(
+-    "platform,arch,stdlibs",
++    "platform,arch,stdlib,stdlib_version",
+     [
+-        ("linux", "64", {"sysroot_linux-64 2.12.*"}),
+-        ("linux", "aarch64", {"sysroot_linux-aarch64 2.17.*"}),
+-        ("osx", "64", {"macosx_deployment_target_osx-64 10.13.*"}),
+-        ("osx", "arm64", {"macosx_deployment_target_osx-arm64 11.0.*"}),
++        ("linux", "64", "sysroot", "2.12"),
++        ("linux", "aarch64", "sysroot", "2.17"),
++        ("osx", "64", "macosx_deployment_target", "10.13"),
++        ("osx", "arm64", "macosx_deployment_target", "11.0"),
+     ],
+ )
+ def test_native_stdlib_metadata(
+-    platform: str, arch: str, stdlibs: set[str], testing_config
++    platform: str, arch: str, stdlib: str, stdlib_version: str, testing_config
+ ):
+     testing_config.platform = platform
+     metadata = api.render(
+@@ -253,7 +253,12 @@ def test_native_stdlib_metadata(
+         bypass_env_check=True,
+         python="3.11",  # irrelevant
+     )[0][0]
+-    assert stdlibs <= set(metadata.meta["requirements"]["host"])
++    stdlib_req = f"{stdlib}_{platform}-{arch} {stdlib_version}.*"
++    assert stdlib_req in metadata.meta["requirements"]["host"]
++    assert {"c_stdlib", "c_stdlib_version"} <= metadata.get_used_vars()
++    hash_contents = metadata.get_hash_contents()
++    assert stdlib == hash_contents["c_stdlib"]
++    assert stdlib_version == hash_contents["c_stdlib_version"]
+
+
+ def test_hash_build_id(testing_metadata):