feat: encrypted flight api via cloud proxy #4817
Workflow file for this run
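# CI workflow: a single build job warms a shared cargo/target cache, then the
# lint, unit, binding, protocol, and integration test jobs fan out from it.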
name: GlareDB CI

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

concurrency:
  group: build-ci-${{ github.ref }}
  cancel-in-progress: true

jobs:
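  # Compile the workspace and populate the cargo/target cache restored by the
  # downstream jobs. The glaredb binary itself is excluded from the cache,
  # presumably so each job relinks it from the cached intermediates.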
  build:
    name: Build
    runs-on: ubuntu-latest-8-cores
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: build
        run: just build
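  # Clippy lints and format checks.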
  static-analysis:
    name: Lint and Format
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: clippy
        run: just clippy
      - name: format
        run: just fmt-check
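  # Workspace unit tests via `just unit-tests`.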
  unit-tests:
    name: Unit Tests
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: run tests
        run: just unit-tests
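  # Build the glaredb binary and the Python bindings, then run the Python
  # binding test suite.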
  python-binding-tests:
    name: Python Binding Tests
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: build glaredb binary
        run: just build
      - name: build python bindings
        run: just python build
      - name: run python binding tests
        run: just python test
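  # Build the Node.js bindings in debug mode and run their tests on Node 20.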
  nodejs-bindings-tests:
    name: Node.js Binding Tests
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: build node.js bindings
        run: just js build-debug
      - name: run node.js binding tests
        run: just js test
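  # Exercise the Postgres wire protocol: first a standalone script (with protoc
  # resolved through just), then the pgproto SLTs.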
  pg-protocol:
    name: PG Protocol Tests
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: pg protocol test (script)
        run: |
          PROTOC=`just protoc && just --evaluate PROTOC` ./scripts/protocol-test.sh
      - name: pg protocol tests (slt runner)
        run: just slt -v 'pgproto/*'
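  # Public SQL logic tests, run once per client protocol in the matrix
  # (postgres and flightsql). The DEBUG variant adds verbose output when
  # ACTIONS_STEP_DEBUG is 'true'.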
  sql-logic-tests:
    name: SQL Logic Tests
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    strategy:
      matrix:
        protocol: ['postgres','flightsql']
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: public sql logic tests DEBUG
        if: ${{ env.ACTIONS_STEP_DEBUG == 'true' }}
        run: just slt -v 'sqllogictests/* --protocol=${{ matrix.protocol }}'
      - name: public sql logic tests
        if: ${{ env.ACTIONS_STEP_DEBUG != 'true' }}
        run: just slt 'sqllogictests/* --protocol=${{ matrix.protocol }}'
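  # End-to-end process tests driven by pytest. Python dependencies are managed
  # with poetry; both the cargo build cache and the poetry virtualenv are cached.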
  process-integration-tests:
    name: Process Integration Tests (pytest)
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - name: install poetry
        uses: snok/install-poetry@v1
      - name: install python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          cache: poetry
          cache-dependency-path: |
            tests/poetry.lock
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            tests/.venv/
          key: ${{ runner.os }}-poetry-${{ hashFiles('**/Cargo.lock') }}
      - name: setup pytest
        run: just pytest-setup
      - name: run pytest
        run: just pytest
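  # SLTs against live external services (BigQuery, GCS, S3, Azure, MinIO,
  # Postgres, MySQL, MongoDB, SQL Server). Skipped for pull requests from
  # forks, presumably because those runs don't receive the repository secrets.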
  service-integration-tests:
    name: Service Integration Tests (SLT)
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.owner.login == 'GlareDB'
    runs-on: ubuntu-latest-8-cores
    needs: ["build"]
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: GCP authenticate
        uses: google-github-actions/auth@v2
        with:
          credentials_json: ${{ secrets.GCP_SERVICE_ACCOUNT_JSON }}
      - name: setup GCP
        uses: google-github-actions/setup-gcloud@v2
      - name: prepare testdata
        run: ./scripts/prepare-testdata.sh
      - name: run tests (slt)
        env:
          GCP_SERVICE_ACCOUNT_KEY: ${{ secrets.GCP_SERVICE_ACCOUNT_JSON }}
          GCP_PROJECT_ID: glaredb-artifacts
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AZURE_ACCESS_KEY: ${{ secrets.AZURE_ACCESS_KEY }}
          AZURE_ACCOUNT: ${{ secrets.AZURE_ACCOUNT }}
          MINIO_ACCESS_KEY: glaredb
          MINIO_SECRET_KEY: glaredb_test
          TEST_BUCKET: glaredb-test-bucket
        run: |
          # Prepare SLT (BigQuery)
          if ./scripts/files-changed-in-branch.sh \
            "scripts/prepare-testdata.sh" \
            "scripts/create-test-bigquery-db.sh" \
            "testdata/sqllogictests_datasources_common/data" \
            "testdata/sqllogictests_bigquery/data"
          then
            export GCP_PROJECT_ID=glaredb-dev-playground
            export BIGQUERY_DATASET_ID=$(./scripts/create-test-bigquery-db.sh)
          else
            export BIGQUERY_DATASET_ID=glaredb_test
          fi

          # Prepare SLT (Object store)
          export GCS_BUCKET_NAME=glaredb-test
          export AWS_S3_REGION=us-east-1
          export AWS_S3_BUCKET_NAME=glaredb-test

          # Unset application default credentials. We don't want to unknowingly
          # depend on this.
          unset GOOGLE_APPLICATION_CREDENTIALS

          # Prepare SLT (Postgres)
          POSTGRES_TEST_DB=$(./scripts/create-test-postgres-db.sh)
          export POSTGRES_CONN_STRING=$(echo "$POSTGRES_TEST_DB" | sed -n 1p)
          export POSTGRES_TUNNEL_SSH_CONN_STRING=$(echo "$POSTGRES_TEST_DB" | sed -n 2p)

          # Prepare SLT (MySQL)
          MYSQL_TEST_DB=$(./scripts/create-test-mysql-db.sh)
          export MYSQL_CONN_STRING=$(echo "$MYSQL_TEST_DB" | sed -n 1p)
          export MYSQL_TUNNEL_SSH_CONN_STRING=$(echo "$MYSQL_TEST_DB" | sed -n 2p)

          # Prepare SLT (MongoDB)
          export MONGO_CONN_STRING=$(./scripts/create-test-mongo-db.sh)

          # Prepare SLT (SQL Server)
          export SQL_SERVER_CONN_STRING=$(./scripts/create-test-sqlserver-db.sh)

          # Prepare SLT (MinIO)
          ./scripts/create-test-minio-store.sh

          # Prepare SLT (fake GCS server)
          ./scripts/create-test-gcs-store.sh

          echo "-------------------------------- WITHOUT TUNNEL TEST --------------------------------"
          # Run all data source tests without running tunnel tests or the basic
          # SLT tests.
          just slt --exclude 'sqllogictests/*' --exclude '*/tunnels/ssh' --exclude 'sqllogictests_snowflake/*'

          echo "-------------------------------- WITH TUNNEL TEST --------------------------------"
          # SSH tests are prone to fail if we make a lot of connections at the
          # same time. Hence, it makes sense to run all the SSH tests one-by-one
          # in order to test the SSH tunnels (which is our aim).
          just sql-logic-tests --jobs=1 '*/tunnels/ssh'

          echo "-------------------------------- RPC TESTS --------------------------------"
          just rpc-tests
          just sql-logic-tests --protocol=rpc 'sqllogictests_bigquery/*'
          just sql-logic-tests --protocol=rpc 'sqllogictests_iceberg/*'
          just sql-logic-tests --protocol=rpc 'sqllogictests_native/*'
          just sql-logic-tests --protocol=rpc 'sqllogictests_object_store/*'
          just sql-logic-tests --protocol=rpc 'sqllogictests_sqlserver/*'
          just sql-logic-tests --protocol=rpc --exclude '*/tunnels/ssh' 'sqllogictests_mongodb/*'
          just sql-logic-tests --protocol=rpc --exclude '*/tunnels/ssh' 'sqllogictests_mysql/*'
          just sql-logic-tests --protocol=rpc --exclude '*/tunnels/ssh' 'sqllogictests_postgres/*'

          echo "-------------------------- REMOTE DATA STORAGE TESTS --------------------------------"
          # Test using a remote object store for storing databases and catalog.
          # MinIO (S3)
          just sql-logic-tests --location http://localhost:9000 \
            --option access_key_id=$MINIO_ACCESS_KEY \
            --option secret_access_key=$MINIO_SECRET_KEY \
            --option bucket=$TEST_BUCKET \
            'sqllogictests/*' \
            'sqllogictests_native/*'

          # MinIO (S3) but with a sub-directory path
          just slt -l http://localhost:9000/$TEST_BUCKET/path/to/folder \
            -o access_key_id=$MINIO_ACCESS_KEY \
            -o secret_access_key=$MINIO_SECRET_KEY \
            'sqllogictests/*'

          # Fake GCS server with a sub-directory path; run with two different
          # folder paths to assert no conflicts arise.
          just slt -l gs://$TEST_BUCKET/path/to/folder/1 -o service_account_path=/tmp/fake-gcs-creds.json 'sqllogictests_native/*'
          just slt -l gs://$TEST_BUCKET/path/to/folder/2 -o service_account_path=/tmp/fake-gcs-creds.json 'sqllogictests_native/*'
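  # Snowflake SLTs run after the other service tests in their own concurrency
  # group (cancel-in-progress disabled), so only one Snowflake run executes at
  # a time.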
  service-integration-tests-snowflake:
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.owner.login == 'GlareDB'
    name: Snowflake Service Integration Tests (SLT::Snowflake)
    runs-on: ubuntu-latest-8-cores
    needs: ["service-integration-tests"]
    concurrency:
      group: snowflake-integration-tests
      cancel-in-progress: false
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - uses: extractions/setup-just@v1
      - uses: actions/cache@v3
        name: cache
        with:
          path: |
            ~/.cargo/
            target/
            !target/**/glaredb
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: snowflake setup (SnowSQL)
        run: |
          curl -o snowsql.bash \
            https://sfc-repo.snowflakecomputing.com/snowsql/bootstrap/1.2/linux_x86_64/snowsql-1.2.24-linux_x86_64.bash
          mkdir -p ~/bin
          SNOWSQL_DEST=~/bin SNOWSQL_LOGIN_SHELL=~/.profile bash snowsql.bash
      - name: prepare testdata
        run: ./scripts/prepare-testdata.sh
      - name: run tests (slt)
        env:
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
        run: |
          # Prepare SLT (Snowflake)
          export PATH="$HOME/bin:$PATH"
          if ./scripts/files-changed-in-branch.sh \
            "scripts/prepare-testdata.sh" \
            "scripts/create-test-snowflake-db.sh" \
            "testdata/sqllogictests_datasources_common/data" \
            "testdata/sqllogictests_snowflake/data"
          then
            export SNOWFLAKE_DATABASE=$(./scripts/create-test-snowflake-db.sh)
          else
            export SNOWFLAKE_DATABASE=glaredb_test
          fi
          just sql-logic-tests 'sqllogictests_snowflake/*'
          just sql-logic-tests --protocol=rpc 'sqllogictests_snowflake/*'