New view for Data Dict JSON to support chunk load #13303

Workflow file for this run

name: commcare-hq tests
on:
  pull_request:
    branches:
      - master
      - hotfix-deploy
      - mjr/add-change-meta-context
      - mjr/new_dedupe_model
      - mjr/dedupe_handle_closed_cases
  schedule:
    # see corehq/apps/hqadmin/management/commands/static_analysis.py
    - cron: '47 12 * * *'
jobs:
  tests:
    runs-on: ubuntu-22.04
    timeout-minutes: 60
    strategy:
      fail-fast: false
      matrix:
        include:
          - {TEST: python, NOSE_DIVIDED_WE_RUN: '05'}
          - {TEST: python, NOSE_DIVIDED_WE_RUN: '6a'}
          - {TEST: python, NOSE_DIVIDED_WE_RUN: 'bf'}
          - {TEST: python-sharded-and-javascript}
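          # NOSE_DIVIDED_WE_RUN above appears to select a slice of the Python
          # test suite by hashed test name (e.g. '05' ~ hashes 0-5), so the
          # three python jobs together cover the full suite; this is an
          # inference from the --divided-we-run flag below, not documented here.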
    env:
      DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
      DATADOG_APP_KEY: ${{ secrets.DATADOG_APP_KEY }}
      REUSE_DB: true  # do not drop databases on completion to save time
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Docker info
        run: |
          docker version
          docker compose version
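      # Log in to Docker Hub only when the DOCKERHUB_TOKEN secret is present;
      # secrets are not exposed to pull requests from forks, so the step is
      # skipped there (the login presumably avoids anonymous pull rate limits).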
      - name: Docker login
        env:
          TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
        if: env.TOKEN != ''
        uses: docker/login-action@v3
        with:
          username: dimagi
          password: ${{ secrets.DOCKERHUB_TOKEN }}
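      # scripts/docker appears to wrap docker compose: it brings up the test
      # containers and runs the suite, restricted to this matrix job's slice
      # via --divided-we-run.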
      - name: Run tests
        env:
          TEST: ${{ matrix.TEST }}
          NOSE_DIVIDED_WE_RUN: ${{ matrix.NOSE_DIVIDED_WE_RUN }}
          JS_SETUP: yes
          KAFKA_HOSTNAME: kafka
          STRIPE_PRIVATE_KEY: ${{ secrets.STRIPE_PRIVATE_KEY }}
        run: scripts/docker test --noinput --stop -v --divided-we-run=${{ matrix.NOSE_DIVIDED_WE_RUN }} --divide-depth=1 --with-timing --with-flaky --threshold=10 --max-test-time=29
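      # Coverage is uploaded only when CODECOV_TOKEN is configured, so fork
      # PRs (which cannot read repository secrets) skip this step rather than fail.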
- name: "Codecov upload"
env:
TOKEN: ${{ secrets.CODECOV_TOKEN }}
if: env.TOKEN != ''
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
fail_ci_if_error: true
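      # Container teardown and artifact collection run even when earlier steps fail.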
      - name: Stop containers
        if: always()
        run: scripts/docker down
      - name: Upload test artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-artifacts
          path: artifacts
          if-no-files-found: ignore
          retention-days: 7