chore: Add documentation for maintainer development (#3025)
* Maintainer test docs

Signed-off-by: Kevin Zhang <kzhang@tecton.ai>

* Add links from contributing and development guide

Signed-off-by: Kevin Zhang <kzhang@tecton.ai>
kevjumba authored Aug 6, 2022
1 parent 8bc53bd commit fe9a0bd
Showing 18 changed files with 526 additions and 65 deletions.
159 changes: 159 additions & 0 deletions .github/fork_workflows/fork_pr_integration_tests_aws.yml
@@ -0,0 +1,159 @@
name: fork-pr-integration-tests-aws

on: [pull_request]

jobs:
  build-docker-image:
    if: github.repository == 'your github repo' # replace with your fork, e.g. your-username/feast
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          # pull_request_target runs the workflow in the context of the base repo,
          # so actions/checkout must be explicitly configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          install: true
      - name: Set up AWS SDK
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
      - name: Set ECR image tag
        id: image-tag
        run: echo "::set-output name=DOCKER_IMAGE_TAG::`git rev-parse HEAD`"
      - name: Cache Public ECR Image
        id: lambda_python_3_9
        uses: actions/cache@v2
        with:
          path: ~/cache
          key: lambda_python_3_9
      - name: Handle Cache Miss (pull public ECR image & save it to tar file)
        if: steps.lambda_python_3_9.outputs.cache-hit != 'true'
        run: |
          mkdir -p ~/cache
          docker pull public.ecr.aws/lambda/python:3.9
          docker save public.ecr.aws/lambda/python:3.9 -o ~/cache/lambda_python_3_9.tar
      - name: Handle Cache Hit (load docker image from tar file)
        if: steps.lambda_python_3_9.outputs.cache-hit == 'true'
        run: |
          docker load -i ~/cache/lambda_python_3_9.tar
      - name: Build and push
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          ECR_REPOSITORY: feast-python-server
        run: |
          docker build \
            --file sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile \
            --tag $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} \
            --load \
            .
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }}
    outputs:
      DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }}
  integration-test-python:
    if: github.repository == 'your github repo' # replace with your fork, e.g. your-username/feast
    runs-on: ${{ matrix.os }}
    needs: build-docker-image # required so the needs.*.outputs reference below resolves
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.8" ]
        os: [ ubuntu-latest ]
    env:
      OS: ${{ matrix.os }}
      PYTHON: ${{ matrix.python-version }}
    services:
      redis:
        image: redis
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v2
        with:
          # pull_request_target runs the workflow in the context of the base repo,
          # so actions/checkout must be explicitly configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Setup Python
        uses: actions/setup-python@v2
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Setup Go
        id: setup-go
        uses: actions/setup-go@v2
        with:
          go-version: 1.18.0
      - name: Set up AWS SDK
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Use AWS CLI
        run: aws sts get-caller-identity
      - name: Upgrade pip version
        run: |
          pip install --upgrade "pip>=21.3.1,<22.1"
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"
      - name: pip cache
        uses: actions/cache@v2
        with:
          path: |
            ${{ steps.pip-cache.outputs.dir }}
            /opt/hostedtoolcache/Python
            /Users/runner/hostedtoolcache/Python
          key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
      - name: Install pip-tools
        run: pip install pip-tools
      - name: Install apache-arrow on ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt update
          sudo apt install -y -V ca-certificates lsb-release wget
          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt update
          sudo apt install -y -V libarrow-dev
      - name: Install apache-arrow on macos
        if: matrix.os == 'macOS-latest'
        run: brew install apache-arrow
      - name: Install dependencies
        run: make install-python-ci-dependencies
      - name: Setup Redis Cluster
        run: |
          docker pull vishnunair/docker-redis-cluster:latest
          docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster
      - name: Test python
        if: ${{ always() }} # ensures this step isn't canceled, so resources don't leak
        env:
          FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }}
        run: |
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "aws and not Snowflake and not BigQuery"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not Snowflake and not BigQuery"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "dynamo and not Snowflake and not BigQuery"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Redshift and not Snowflake and not BigQuery"
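
Note: the `::set-output` workflow command used in the "Set ECR image tag" and "Get pip cache dir" steps above has since been deprecated by GitHub. On current runners the same step can be written against the `$GITHUB_OUTPUT` file; a minimal sketch, keeping the step name and output key from the workflow above:

      - name: Set ECR image tag
        id: image-tag
        # replaces the deprecated ::set-output workflow command
        run: echo "DOCKER_IMAGE_TAG=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"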
97 changes: 97 additions & 0 deletions .github/fork_workflows/fork_pr_integration_tests_gcp.yml
@@ -0,0 +1,97 @@
name: fork-pr-integration-tests-gcp

on: [pull_request]

jobs:
  integration-test-python:
    if: github.repository == 'your github repo' # replace with your fork, e.g. your-username/feast
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.8" ]
        os: [ ubuntu-latest ]
    env:
      OS: ${{ matrix.os }}
      PYTHON: ${{ matrix.python-version }}
    services:
      redis:
        image: redis
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v2
        with:
          # pull_request_target runs the workflow in the context of the base repo,
          # so actions/checkout must be explicitly configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Setup Python
        uses: actions/setup-python@v2
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Setup Go
        id: setup-go
        uses: actions/setup-go@v2
        with:
          go-version: 1.18.0
      - name: Set up gcloud SDK
        uses: google-github-actions/setup-gcloud@v0
        with:
          project_id: ${{ secrets.GCP_PROJECT_ID }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
          export_default_credentials: true
      - name: Use gcloud CLI
        run: gcloud info
      - name: Upgrade pip version
        run: |
          pip install --upgrade "pip>=21.3.1,<22.1"
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"
      - name: pip cache
        uses: actions/cache@v2
        with:
          path: |
            ${{ steps.pip-cache.outputs.dir }}
            /opt/hostedtoolcache/Python
            /Users/runner/hostedtoolcache/Python
          key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
      - name: Install pip-tools
        run: pip install pip-tools
      - name: Install apache-arrow on ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt update
          sudo apt install -y -V ca-certificates lsb-release wget
          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt update
          sudo apt install -y -V libarrow-dev
      - name: Install apache-arrow on macos
        if: matrix.os == 'macOS-latest'
        run: brew install apache-arrow
      - name: Install dependencies
        run: make install-python-ci-dependencies
      - name: Setup Redis Cluster
        run: |
          docker pull vishnunair/docker-redis-cluster:latest
          docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster
      - name: Test python
        if: ${{ always() }} # ensures this step isn't canceled, so resources don't leak
        # Run only the BigQuery and File tests, excluding the dynamo and redshift tests.
        run: |
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "BigQuery and not dynamo and not Redshift and not Snowflake"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Snowflake"
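
When iterating on one of these fork workflows it can be faster to reproduce a single pytest selection locally before pushing. A sketch, assuming a local checkout where `make install-python-ci-dependencies` has already run and gcloud application-default credentials are configured; the flags mirror the workflow step above, minus coverage reporting:

    # Local reproduction of the GCP BigQuery selection from the workflow above
    pytest -n 8 --color=yes sdk/python/tests --integration --durations=5 \
      --timeout=1200 --timeout_method=thread \
      -k "BigQuery and not dynamo and not Redshift and not Snowflake"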
96 changes: 96 additions & 0 deletions .github/fork_workflows/fork_pr_integration_tests_snowflake.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
name: fork-pr-integration-tests-snowflake

on: [pull_request]

jobs:
integration-test-python:
if: github.repository == 'your github repo' # swap here with your project id
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
python-version: [ "3.8" ]
os: [ ubuntu-latest ]
env:
OS: ${{ matrix.os }}
PYTHON: ${{ matrix.python-version }}
services:
redis:
image: redis
ports:
- 6379:6379
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v2
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicit configured to retrieve
# code from the PR.
ref: refs/pull/${{ github.event.pull_request.number }}/merge
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v2
id: setup-python
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- name: Setup Go
id: setup-go
uses: actions/setup-go@v2
with:
go-version: 1.18.0

- name: Upgrade pip version
run: |
pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v2
with:
path: |
${{ steps.pip-cache.outputs.dir }}
/opt/hostedtoolcache/Python
/Users/runner/hostedtoolcache/Python
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
- name: Install pip-tools
run: pip install pip-tools
- name: Install apache-arrow on ubuntu
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt update
sudo apt install -y -V ca-certificates lsb-release wget
wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
sudo apt update
sudo apt install -y -V libarrow-dev
- name: Install apache-arrow on macos
if: matrix.os == 'macOS-latest'
run: brew install apache-arrow
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Setup Redis Cluster
run: |
docker pull vishnunair/docker-redis-cluster:latest
docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster
- name: Test python
if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak
env:
SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }}
SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }}
SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }}
SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }}
SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }}
# Run only Snowflake BigQuery and File tests without dynamo and redshift tests.
run: |
pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Snowflake and not dynamo and not Redshift and not Bigquery and not gcp"
pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Bigquery and not gcp"
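
These fork workflows read credentials from repository secrets, which must be registered on the fork itself. One way to do that is with the GitHub CLI (`gh`); a sketch with placeholder values, where `your-username/feast` stands in for your fork's name:

    # Register the Snowflake secrets on the fork; repeat for each secret name
    # the workflow references. All values here are placeholders.
    gh secret set SNOWFLAKE_CI_DEPLOYMENT --repo your-username/feast --body "<deployment>"
    gh secret set SNOWFLAKE_CI_USER --repo your-username/feast --body "<user>"
    gh secret set SNOWFLAKE_CI_PASSWORD --repo your-username/feast --body "<password>"
    gh secret set SNOWFLAKE_CI_ROLE --repo your-username/feast --body "<role>"
    gh secret set SNOWFLAKE_CI_WAREHOUSE --repo your-username/feast --body "<warehouse>"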
4 changes: 4 additions & 0 deletions .github/workflows/java_master_only.yml
@@ -9,6 +9,7 @@ on:

jobs:
  build-docker-images:
    if: github.repository == 'feast-dev/feast'
    runs-on: ubuntu-latest
    strategy:
      matrix:
@@ -46,6 +47,7 @@ jobs:
          fi
  lint-java:
    if: github.repository == 'feast-dev/feast'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
@@ -55,6 +57,7 @@ jobs:
        run: make lint-java

  unit-test-java:
    if: github.repository == 'feast-dev/feast'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
@@ -80,6 +83,7 @@ jobs:
          path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/

  integration-test:
    if: github.repository == 'feast-dev/feast'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
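
The `if: github.repository == 'feast-dev/feast'` guards added here are the mirror image of the guard in the fork workflows above: upstream-only jobs are skipped on forks, while the fork workflows run only on your fork. On a fork named `your-username/feast` (a placeholder), the fork-side guard would read:

    integration-test-python:
      if: github.repository == 'your-username/feast' # placeholder fork name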