refactor(framework) Support uint64 in Scalar and ConfigsRecordValue #15993
Workflow file for this run
name: E2E

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
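
# Cancel in-progress runs when a pull request is updated (grouped by PR number);
# pushes to main are grouped by run_id, so they are never cancelled.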
concurrency:
  group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/main' && github.run_id || github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
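
# Disable Flower telemetry in CI and set the S3 bucket used for wheel artifacts.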
env:
  FLWR_TELEMETRY_ENABLED: 0
  ARTIFACT_BUCKET: artifact.flower.ai

jobs:
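  # Build and test the Flower wheel once; for non-fork runs, upload it to S3 so
  # the downstream jobs can install this exact build.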
  wheel:
    runs-on: ubuntu-22.04
    name: Build, test and upload wheel
    steps:
      - uses: actions/checkout@v4
      - name: Bootstrap
        uses: ./.github/actions/bootstrap
      - name: Install dependencies (mandatory only)
        run: python -m poetry install
      - name: Build wheel
        run: ./dev/build.sh
      - name: Test wheel
        run: ./dev/test-wheel.sh
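      # Forked PRs and Dependabot runs do not have access to the AWS secrets, so
      # the upload only runs for non-fork events in adap/flower. The step records
      # the wheel filename, short commit SHA and target directory as outputs, then
      # copies the wheel to a commit-specific path and to `latest`.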
      - name: Upload wheel
        if: ${{ github.repository == 'adap/flower' && !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
        id: upload
        env:
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        run: |
          cd ./dist
          echo "WHL_PATH=$(ls *.whl)" >> "$GITHUB_OUTPUT"
          sha_short=$(git rev-parse --short HEAD)
          echo "SHORT_SHA=$sha_short" >> "$GITHUB_OUTPUT"
          [ -z "${{ github.head_ref }}" ] && dir="${{ github.ref_name }}" || dir="pr/${{ github.head_ref }}"
          echo "DIR=$dir" >> "$GITHUB_OUTPUT"
          aws s3 cp --content-disposition "attachment" --cache-control "no-cache" ./ s3://${{ env.ARTIFACT_BUCKET }}/py/$dir/$sha_short --recursive
          aws s3 cp --content-disposition "attachment" --cache-control "no-cache" ./ s3://${{ env.ARTIFACT_BUCKET }}/py/$dir/latest --recursive
    outputs:
      whl_path: ${{ steps.upload.outputs.WHL_PATH }}
      short_sha: ${{ steps.upload.outputs.SHORT_SHA }}
      dir: ${{ steps.upload.outputs.DIR }}
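
  # One matrix entry per e2e example; the optional `dataset` snippet is executed
  # with `python -c` later in the job to pre-download data before the tests.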
  frameworks:
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    needs: wheel
    # Using approach described here:
    # https://docs.github.com/en/actions/using-jobs/using-a-matrix-for-your-jobs
    strategy:
      matrix:
        include:
          - directory: e2e-bare
            e2e: e2e_bare
          - directory: e2e-bare-https
            e2e: e2e_bare_https
          - directory: e2e-bare-auth
            e2e: e2e_bare_auth
          - directory: e2e-jax
            e2e: e2e_jax
          - directory: e2e-pytorch
            e2e: e2e_pytorch
            dataset: |
              from torchvision.datasets import CIFAR10
              CIFAR10('./data', download=True)
          - directory: e2e-tensorflow
            e2e: e2e_tensorflow
            dataset: |
              import tensorflow as tf
              tf.keras.datasets.cifar10.load_data()
          - directory: e2e-opacus
            e2e: e2e_opacus
            dataset: |
              from torchvision.datasets import CIFAR10
              CIFAR10('./data', download=True)
          - directory: e2e-pytorch-lightning
            e2e: e2e_pytorch_lightning
            dataset: |
              from torchvision.datasets import MNIST
              MNIST('./data', download=True)
          - directory: e2e-scikit-learn
            e2e: e2e_scikit_learn
            dataset: |
              import openml
              openml.datasets.get_dataset(554)
          - directory: e2e-fastai
            e2e: e2e_fastai
            dataset: |
              from fastai.vision.all import untar_data, URLs
              untar_data(URLs.MNIST)
          - directory: e2e-pandas
            e2e: e2e_pandas
            dataset: |
              from pathlib import Path
              from sklearn.datasets import load_iris
              Path('data').mkdir(exist_ok=True)
              load_iris(as_frame=True)['data'].to_csv('./data/client.csv')
    name: Framework / ${{ matrix.directory }}
    defaults:
      run:
        working-directory: e2e/${{ matrix.directory }}
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: Install build tools
        run: |
          python -m pip install -U pip==23.3.1
        shell: bash
      # Using approach described here for Python location caching:
      # https://blog.allenai.org/python-caching-in-github-actions-e9452698e98d
      - name: Cache Python location
        id: cache-restore-python
        uses: actions/cache/restore@v4
        with:
          path: ${{ env.pythonLocation }}
          key: pythonloc-${{ runner.os }}-${{ matrix.directory }}-${{ env.pythonLocation }}-${{ hashFiles(format('./e2e/{0}/pyproject.toml', matrix.directory)) }}
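      # Install the example project (and its dependencies) from its own
      # pyproject.toml; for non-fork runs the Flower wheel built above is then
      # installed on top so the tests run against this PR's build.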
      - name: Install dependencies
        run: python -m pip install --upgrade .
      - name: Install Flower wheel from artifact store
        if: ${{ github.repository == 'adap/flower' && !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
        run: |
          python -m pip install https://${{ env.ARTIFACT_BUCKET }}/py/${{ needs.wheel.outputs.dir }}/${{ needs.wheel.outputs.short_sha }}/${{ needs.wheel.outputs.whl_path }}
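      # Run the per-example `dataset` snippet (if any) with `python -c` so the
      # data is downloaded once before the tests start.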
      - name: Download dataset
        if: ${{ matrix.dataset }}
        run: python -c "${{ matrix.dataset }}"
      - name: Run edge client test
        if: ${{ matrix.directory != 'e2e-bare-auth' }}
        working-directory: e2e/${{ matrix.directory }}/${{ matrix.e2e }}
        run: ./../../test_legacy.sh "${{ matrix.directory }}"
      - name: Run virtual client test
        if: ${{ matrix.directory != 'e2e-bare-auth' }}
        run: python simulation.py
      - name: Run simulation engine test
        if: ${{ matrix.directory == 'e2e-pytorch' || matrix.directory == 'e2e-tensorflow' }}
        run: python simulation_next.py
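      # SuperLink (driver) tests: e2e-bare additionally covers the REST
      # transport, the SQLite state backend and reconnection; e2e-bare-auth only
      # runs the client-authentication variant.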
      - name: Run driver test
        if: ${{ matrix.directory != 'e2e-bare-auth' }}
        run: ./../test_superlink.sh "${{ matrix.directory }}"
      - name: Run driver test with REST
        if: ${{ matrix.directory == 'e2e-bare' }}
        run: ./../test_superlink.sh bare rest
      - name: Run driver test with SQLite database
        if: ${{ matrix.directory == 'e2e-bare' }}
        run: ./../test_superlink.sh bare sqlite
      - name: Run driver test with client authentication
        if: ${{ matrix.directory == 'e2e-bare-auth' }}
        run: ./../test_superlink.sh "${{ matrix.directory }}" client-auth
      - name: Run reconnection test with SQLite database
        if: ${{ matrix.directory == 'e2e-bare' }}
        run: ./../test_reconnection.sh sqlite
      - name: Cache save Python location
        id: cache-save-python
        uses: actions/cache/save@v4
        if: ${{ github.ref_name == 'main' && !steps.cache-restore-python.outputs.cache-hit }}
        with:
          path: ${{ env.pythonLocation }}
          key: pythonloc-${{ runner.os }}-${{ matrix.directory }}-${{ env.pythonLocation }}-${{ hashFiles(format('./e2e/{0}/pyproject.toml', matrix.directory)) }}
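
  # Run the strategies example once per built-in strategy listed in the matrix.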
  strategies:
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    needs: wheel
    strategy:
      matrix:
        strat: ["FedMedian", "FedTrimmedAvg", "QFedAvg", "FaultTolerantFedAvg", "FedAvgM", "FedAdam", "FedAdagrad", "FedYogi"]
    name: Strategy / ${{ matrix.strat }}
    defaults:
      run:
        working-directory: e2e/strategies
    steps:
      - uses: actions/checkout@v4
      - name: Bootstrap
        uses: ./.github/actions/bootstrap
      - name: Install dependencies
        run: |
          python -m poetry install
      - name: Install Flower wheel from artifact store
        if: ${{ github.repository == 'adap/flower' && !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
        run: |
          python -m pip install https://${{ env.ARTIFACT_BUCKET }}/py/${{ needs.wheel.outputs.dir }}/${{ needs.wheel.outputs.short_sha }}/${{ needs.wheel.outputs.whl_path }}
      - name: Cache Datasets
        uses: actions/cache@v4
        with:
          path: "~/.keras"
          key: keras-datasets
      - name: Download Datasets
        run: |
          python -c "import tensorflow as tf; tf.keras.datasets.mnist.load_data()"
      - name: Test strategies
        run: |
          python test.py "${{ matrix.strat }}"
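
  # Generate a project from each `flwr new` template, install it, and run it for
  # a single round.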
  templates:
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    needs: wheel
    strategy:
      matrix:
        framework: ["numpy", "pytorch", "tensorflow", "jax", "sklearn"]
    name: Template / ${{ matrix.framework }}
    steps:
      - uses: actions/checkout@v4
      - name: Bootstrap
        uses: ./.github/actions/bootstrap
      - name: Install Flower from repo
        if: ${{ github.repository != 'adap/flower' || github.event.pull_request.head.repo.fork || github.actor == 'dependabot[bot]' }}
        run: |
          python -m pip install .
      - name: Install Flower wheel from artifact store
        if: ${{ github.repository == 'adap/flower' && !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
        run: |
          python -m pip install https://${{ env.ARTIFACT_BUCKET }}/py/${{ needs.wheel.outputs.dir }}/${{ needs.wheel.outputs.short_sha }}/${{ needs.wheel.outputs.whl_path }}
      - name: Create project and install it
        run: |
          flwr new tmp-${{ matrix.framework }} --framework ${{ matrix.framework }} --username gh_ci
          cd tmp-${{ matrix.framework }}
          pip install .
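      # Fail the job if the `flwr run` output contains an ERROR log line.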
      - name: Run project
        run: |
          cd tmp-${{ matrix.framework }}
          flwr run --run-config num-server-rounds=1 2>&1 | tee flwr_output.log
          if grep -q "ERROR" flwr_output.log; then
            exit 1
          fi